[NEMO-361] Consistency on indentations (#207)

JIRA: [NEMO-361: Consistency on indentations](https://issues.apache.org/jira/projects/NEMO/issues/NEMO-361)

**Major changes:**
- Runs IntelliJ's 'Reformat Code' action to auto-indent the files (see the before/after sketch below).
- Re-orders imports alphabetically.
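
For reference, the Python sources move from 4-space to 2-space indentation, with two blank lines between top-level definitions; `getIdx()` in `bin/json2dot.py` illustrates the change:

```python
# Before: 4-space indentation.
def getIdx():
    global nextIdx
    nextIdx += 1
    return nextIdx


# After: 2-space indentation, as produced by IntelliJ's formatter.
def getIdx():
  global nextIdx
  nextIdx += 1
  return nextIdx
```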

**Minor changes to note:**
- None

**Tests for the changes:**
- None; this change only fixes indentation and import ordering. A quick manual smoke test, sketched below, can confirm the scripts still run.
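
For example, a manual check like the following (not part of this PR; it assumes `bin/` is on `PYTHONPATH`) confirms `json2dot.py` still behaves the same after reformatting:

```python
# Hypothetical smoke test: import the reformatted script and feed it an
# empty DAG; it should still emit a well-formed (empty) digraph.
import json2dot  # assumes bin/ is on PYTHONPATH

dot = json2dot.jsonToDot({'vertices': [], 'edges': []})
assert dot.startswith('digraph dag {') and dot.endswith('}')
print(dot)
```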

**Other comments:**
- None

Closes #207
diff --git a/bin/json2dot.py b/bin/json2dot.py
index e42943d..e82277a 100755
--- a/bin/json2dot.py
+++ b/bin/json2dot.py
@@ -22,306 +22,356 @@
 This file is used as backend for https://service.jangho.io/nemo-dag/
 '''
 
-import sys
 import json
 import re
+import sys
 
 nextIdx = 0
 
+
 def propertiesToString(properties):
-    return '<BR/>'.join(['{}={}'.format(re.sub('Property$', '', item[0].split('.')[-1]), item[1]) for item in sorted(properties.items())])
+  return '<BR/>'.join(
+    ['{}={}'.format(re.sub('Property$', '', item[0].split('.')[-1]), item[1]) for item in sorted(properties.items())])
+
 
 def getIdx():
-    global nextIdx
-    nextIdx += 1
-    return nextIdx
+  global nextIdx
+  nextIdx += 1
+  return nextIdx
+
 
 def stateToColor(state):
-    try:
-        return {'READY': '#fffbe2',
-                'EXECUTING': '#e2fbff',
-                'COMPLETE': '#e2ffe5',
-                'FAILED_RECOVERABLE': '#ffe2e2',
-                'FAILED_UNRECOVERABLE': '#e2e2e2'}[state]
-    except:
-        return 'white'
+  try:
+    return {'READY': '#fffbe2',
+            'EXECUTING': '#e2fbff',
+            'COMPLETE': '#e2ffe5',
+            'FAILED_RECOVERABLE': '#ffe2e2',
+            'FAILED_UNRECOVERABLE': '#e2e2e2'}[state]
+  except:
+    return 'white'
+
 
 class PlanState:
-    def __init__(self, data):
-        self.id = data['planId']
-        self.stages = {}
-        for stage in data['stages']:
-            self.stages[stage['id']] = StageState(stage)
-    @classmethod
-    def empty(cls):
-        return cls({'planId': None, 'stages': []})
-    def get(self, id):
-        try:
-            return self.stages[id]
-        except:
-            return StageState.empty()
+  def __init__(self, data):
+    self.id = data['planId']
+    self.stages = {}
+    for stage in data['stages']:
+      self.stages[stage['id']] = StageState(stage)
+
+  @classmethod
+  def empty(cls):
+    return cls({'planId': None, 'stages': []})
+
+  def get(self, id):
+    try:
+      return self.stages[id]
+    except:
+      return StageState.empty()
+
 
 class StageState:
-    def __init__(self, data):
-        self.id = data['id']
-        self.state = data['state']
-        self.tasks = {}
-        for irVertex in data['tasks']:
-            self.tasks[irVertex['id']] = TaskState(irVertex)
-    @classmethod
-    def empty(cls):
-        return cls({'id': None, 'state': None, 'tasks': []})
-    def get(self, id):
-        try:
-            return self.tasks[id]
-        except:
-            return TaskState.empty()
-    @property
-    def taskStateSummary(self):
-        stateToNumTasks = dict()
-        for taskState in self.tasks.values():
-            before = stateToNumTasks.get(taskState.state, 0)
-            stateToNumTasks[taskState.state] = before + 1
-        return '\\n'.join(['{}: {}'.format(state, stateToNumTasks[state])
-            for state in stateToNumTasks.keys()])
+  def __init__(self, data):
+    self.id = data['id']
+    self.state = data['state']
+    self.tasks = {}
+    for irVertex in data['tasks']:
+      self.tasks[irVertex['id']] = TaskState(irVertex)
+
+  @classmethod
+  def empty(cls):
+    return cls({'id': None, 'state': None, 'tasks': []})
+
+  def get(self, id):
+    try:
+      return self.tasks[id]
+    except:
+      return TaskState.empty()
+
+  @property
+  def taskStateSummary(self):
+    stateToNumTasks = dict()
+    for taskState in self.tasks.values():
+      before = stateToNumTasks.get(taskState.state, 0)
+      stateToNumTasks[taskState.state] = before + 1
+    return '\\n'.join(['{}: {}'.format(state, stateToNumTasks[state])
+                       for state in stateToNumTasks.keys()])
+
 
 class TaskState:
-    def __init__(self, data):
-        self.id = data['id']
-        self.state = data['state']
-    @classmethod
-    def empty(cls):
-        return cls({'id': None, 'state': None})
+  def __init__(self, data):
+    self.id = data['id']
+    self.state = data['state']
+
+  @classmethod
+  def empty(cls):
+    return cls({'id': None, 'state': None})
+
 
 class DAG:
-    '''
-    A class for converting DAG to Graphviz representation.
-    JSON representation should be formatted like what toString method in DAG.java does.
-    '''
-    def __init__(self, dag, planState):
-        self.vertices = {}
-        self.edges = []
-        for vertex in dag['vertices']:
-            self.vertices[vertex['id']] = Vertex(vertex['id'], vertex['properties'], planState.get(vertex['id']))
-        for edge in dag['edges']:
-            self.edges.append(Edge(self.vertices[edge['src']], self.vertices[edge['dst']], edge['properties']))
-    @property
-    def dot(self):
-        dot = ''
-        for vertex in self.vertices.values():
-            dot += vertex.dot
-        for edge in self.edges:
-            dot += edge.dot
-        return dot
+  '''
+  A class for converting DAG to Graphviz representation.
+  JSON representation should be formatted like what toString method in DAG.java does.
+  '''
+
+  def __init__(self, dag, planState):
+    self.vertices = {}
+    self.edges = []
+    for vertex in dag['vertices']:
+      self.vertices[vertex['id']] = Vertex(vertex['id'], vertex['properties'], planState.get(vertex['id']))
+    for edge in dag['edges']:
+      self.edges.append(Edge(self.vertices[edge['src']], self.vertices[edge['dst']], edge['properties']))
+
+  @property
+  def dot(self):
+    dot = ''
+    for vertex in self.vertices.values():
+      dot += vertex.dot
+    for edge in self.edges:
+      dot += edge.dot
+    return dot
+
 
 def Vertex(id, properties, state):
-    try:
-        return Stage(id, properties, state)
-    except:
-        pass
-    try:
-        return LoopVertex(id, properties)
-    except:
-        pass
-    return NormalVertex(id, properties, state)
+  try:
+    return Stage(id, properties, state)
+  except:
+    pass
+  try:
+    return LoopVertex(id, properties)
+  except:
+    pass
+  return NormalVertex(id, properties, state)
+
 
 class NormalVertex:
-    def __init__(self, id, properties, state):
-        self.id = id
-        self.properties = properties
-        self.idx = getIdx()
-        self.state = state.state
-    @property
-    def dot(self):
-        color = 'black'
-        try:
-            placement = self.properties['executionProperties']['org.apache.nemo.common.ir.vertex.executionproperty.ResourcePriorityProperty']
-            if (placement == 'Transient'):
-                color = 'orange'
-            if (placement == 'Reserved'):
-                color = 'green'
-        except:
-            pass
-        label = self.id
-        if self.state is not None:
-            label += '<BR/>({})'.format(self.state)
-        try:
-            label += '<BR/>{}'.format(self.properties['source'])
-        except:
-            pass
-        try:
-            transform = self.properties['transform'].split(':')
-            transform_name = transform[0]
-            try:
-                class_name = transform[1].split('{')[0].split('.')[-1].split('$')[0].split('@')[0]
-            except IndexError:
-                class_name = '?'
-            label += '<BR/>{}:{}'.format(transform_name, class_name)
-        except:
-            pass
-        if ('class' in self.properties and self.properties['class'] == 'AggregationBarrierVertex'):
-            shape = ', shape=box'
-            label += '<BR/>AggregationBarrier'
-        else:
-            shape = ''
-        try:
-            label += '<BR/><FONT POINT-SIZE=\'10\'>{}</FONT>'.format(propertiesToString(self.properties['executionProperties']))
-        except:
-            pass
-        dot = '{} [label=<{}>, color={}, style=filled, fillcolor="{}"{}];'.format(self.idx, label, color, stateToColor(self.state), shape)
-        return dot
-    @property
-    def oneVertex(self):
-        return self
-    @property
-    def logicalEnd(self):
-        return self.idx
+  def __init__(self, id, properties, state):
+    self.id = id
+    self.properties = properties
+    self.idx = getIdx()
+    self.state = state.state
+
+  @property
+  def dot(self):
+    color = 'black'
+    try:
+      placement = self.properties['executionProperties'][
+        'org.apache.nemo.common.ir.vertex.executionproperty.ResourcePriorityProperty']
+      if (placement == 'Transient'):
+        color = 'orange'
+      if (placement == 'Reserved'):
+        color = 'green'
+    except:
+      pass
+    label = self.id
+    if self.state is not None:
+      label += '<BR/>({})'.format(self.state)
+    try:
+      label += '<BR/>{}'.format(self.properties['source'])
+    except:
+      pass
+    try:
+      transform = self.properties['transform'].split(':')
+      transform_name = transform[0]
+      try:
+        class_name = transform[1].split('{')[0].split('.')[-1].split('$')[0].split('@')[0]
+      except IndexError:
+        class_name = '?'
+      label += '<BR/>{}:{}'.format(transform_name, class_name)
+    except:
+      pass
+    if ('class' in self.properties and self.properties['class'] == 'AggregationBarrierVertex'):
+      shape = ', shape=box'
+      label += '<BR/>AggregationBarrier'
+    else:
+      shape = ''
+    try:
+      label += '<BR/><FONT POINT-SIZE=\'10\'>{}</FONT>'.format(
+        propertiesToString(self.properties['executionProperties']))
+    except:
+      pass
+    dot = '{} [label=<{}>, color={}, style=filled, fillcolor="{}"{}];'.format(self.idx, label, color,
+                                                                              stateToColor(self.state), shape)
+    return dot
+
+  @property
+  def oneVertex(self):
+    return self
+
+  @property
+  def logicalEnd(self):
+    return self.idx
+
 
 class LoopVertex:
-    def __init__(self, id, properties):
-        self.id = id
-        self.dag = DAG(properties['DAG'], PlanState.empty())
-        self.remaining_iteration = properties['remainingIteration']
-        self.executionProperties = properties['executionProperties']
-        self.incoming = properties['dagIncomingEdges']
-        self.outgoing = properties['dagOutgoingEdges']
-        self.edgeMapping = properties['edgeWithLoopToEdgeWithInternalVertex']
-        self.idx = getIdx()
-    @property
-    def dot(self):
-        label = self.id
-        try:
-            label += '<BR/><FONT POINT-SIZE=\'10\'>{}</FONT>'.format(propertiesToString(self.executionProperties))
-        except:
-            pass
-        label += '<BR/>(Remaining iteration: {})'.format(self.remaining_iteration)
-        dot = 'subgraph cluster_{} {{'.format(self.idx)
-        dot += 'label = "{}";'.format(label)
-        dot += self.dag.dot
-        dot += '}'
-        return dot
-    @property
-    def oneVertex(self):
-        return next(iter(self.dag.vertices.values())).oneVertex
-    @property
-    def logicalEnd(self):
-        return 'cluster_{}'.format(self.idx)
-    def internalSrcFor(self, edgeWithLoopId):
-        edgeId = self.edgeMapping[edgeWithLoopId]
-        vertexId = list(filter(lambda v: edgeId in self.outgoing[v], self.outgoing))[0]
-        return self.dag.vertices[vertexId]
-    def internalDstFor(self, edgeWithLoopId):
-        edgeId = self.edgeMapping[edgeWithLoopId]
-        vertexId = list(filter(lambda v: edgeId in self.incoming[v], self.incoming))[0]
-        return self.dag.vertices[vertexId]
+  def __init__(self, id, properties):
+    self.id = id
+    self.dag = DAG(properties['DAG'], PlanState.empty())
+    self.remaining_iteration = properties['remainingIteration']
+    self.executionProperties = properties['executionProperties']
+    self.incoming = properties['dagIncomingEdges']
+    self.outgoing = properties['dagOutgoingEdges']
+    self.edgeMapping = properties['edgeWithLoopToEdgeWithInternalVertex']
+    self.idx = getIdx()
+
+  @property
+  def dot(self):
+    label = self.id
+    try:
+      label += '<BR/><FONT POINT-SIZE=\'10\'>{}</FONT>'.format(propertiesToString(self.executionProperties))
+    except:
+      pass
+    label += '<BR/>(Remaining iteration: {})'.format(self.remaining_iteration)
+    dot = 'subgraph cluster_{} {{'.format(self.idx)
+    dot += 'label = "{}";'.format(label)
+    dot += self.dag.dot
+    dot += '}'
+    return dot
+
+  @property
+  def oneVertex(self):
+    return next(iter(self.dag.vertices.values())).oneVertex
+
+  @property
+  def logicalEnd(self):
+    return 'cluster_{}'.format(self.idx)
+
+  def internalSrcFor(self, edgeWithLoopId):
+    edgeId = self.edgeMapping[edgeWithLoopId]
+    vertexId = list(filter(lambda v: edgeId in self.outgoing[v], self.outgoing))[0]
+    return self.dag.vertices[vertexId]
+
+  def internalDstFor(self, edgeWithLoopId):
+    edgeId = self.edgeMapping[edgeWithLoopId]
+    vertexId = list(filter(lambda v: edgeId in self.incoming[v], self.incoming))[0]
+    return self.dag.vertices[vertexId]
+
 
 class Stage:
-    def __init__(self, id, properties, state):
-        self.id = id
-        self.properties = properties
-        self.stageDAG = DAG(properties['irDag'], PlanState.empty())
-        self.idx = getIdx()
-        self.state = state
-        self.executionProperties = self.properties['executionProperties']
-    @property
-    def dot(self):
-        if self.state.state is None:
-            state = ''
-        else:
-            state = ' ({})'.format(self.state.state)
-        label = '{}{}'.format(self.id, state)
-        if self.state.tasks:
-            label += '<BR/><BR/>{} Task(s):<BR/>{}'.format(len(self.state.tasks), self.state.taskStateSummary)
-        label += '<BR/><FONT POINT-SIZE=\'10\'>{}</FONT>'.format(propertiesToString(self.executionProperties))
-        dot = 'subgraph cluster_{} {{'.format(self.idx)
-        dot += 'label = <{}>;'.format(label)
-        dot += 'color=red; bgcolor="{}";'.format(stateToColor(self.state.state))
-        dot += self.stageDAG.dot
-        dot += '}'
-        return dot
-    @property
-    def oneVertex(self):
-        return next(iter(self.stageDAG.vertices.values())).oneVertex
-    @property
-    def logicalEnd(self):
-        return 'cluster_{}'.format(self.idx)
+  def __init__(self, id, properties, state):
+    self.id = id
+    self.properties = properties
+    self.stageDAG = DAG(properties['irDag'], PlanState.empty())
+    self.idx = getIdx()
+    self.state = state
+    self.executionProperties = self.properties['executionProperties']
+
+  @property
+  def dot(self):
+    if self.state.state is None:
+      state = ''
+    else:
+      state = ' ({})'.format(self.state.state)
+    label = '{}{}'.format(self.id, state)
+    if self.state.tasks:
+      label += '<BR/><BR/>{} Task(s):<BR/>{}'.format(len(self.state.tasks), self.state.taskStateSummary)
+    label += '<BR/><FONT POINT-SIZE=\'10\'>{}</FONT>'.format(propertiesToString(self.executionProperties))
+    dot = 'subgraph cluster_{} {{'.format(self.idx)
+    dot += 'label = <{}>;'.format(label)
+    dot += 'color=red; bgcolor="{}";'.format(stateToColor(self.state.state))
+    dot += self.stageDAG.dot
+    dot += '}'
+    return dot
+
+  @property
+  def oneVertex(self):
+    return next(iter(self.stageDAG.vertices.values())).oneVertex
+
+  @property
+  def logicalEnd(self):
+    return 'cluster_{}'.format(self.idx)
+
 
 def Edge(src, dst, properties):
-    try:
-        return StageEdge(src, dst, properties)
-    except:
-        pass
-    try:
-        return RuntimeEdge(src, dst, properties)
-    except:
-        pass
-    try:
-        return IREdge(src, dst, properties)
-    except:
-        pass
-    return NormalEdge(src, dst, properties)
+  try:
+    return StageEdge(src, dst, properties)
+  except:
+    pass
+  try:
+    return RuntimeEdge(src, dst, properties)
+  except:
+    pass
+  try:
+    return IREdge(src, dst, properties)
+  except:
+    pass
+  return NormalEdge(src, dst, properties)
+
 
 class NormalEdge:
-    def __init__(self, src, dst, properties):
-        self.src = src
-        self.dst = dst
-    @property
-    def dot(self):
-        return '{} -> {} [ltail = {}, lhead = {}];'.format(self.src.oneVertex.idx, self.dst.oneVertex.idx,
-                self.src.logicalEnd, self.dst.logicalEnd)
+  def __init__(self, src, dst, properties):
+    self.src = src
+    self.dst = dst
+
+  @property
+  def dot(self):
+    return '{} -> {} [ltail = {}, lhead = {}];'.format(self.src.oneVertex.idx, self.dst.oneVertex.idx,
+                                                       self.src.logicalEnd, self.dst.logicalEnd)
+
 
 class IREdge:
-    def __init__(self, src, dst, properties):
-        self.src = src
-        self.dst = dst
-        self.id = properties['id']
-        self.executionProperties = properties['executionProperties']
-    @property
-    def dot(self):
-        src = self.src
-        dst = self.dst
-        try:
-            src = src.internalSrcFor(self.id)
-        except:
-            pass
-        try:
-            dst = dst.internalDstFor(self.id)
-        except:
-            pass
-        label = '{}<BR/><FONT POINT-SIZE=\'8\'>{}</FONT>'.format(self.id, propertiesToString(self.executionProperties))
-        return '{} -> {} [ltail = {}, lhead = {}, label = <{}>];'.format(src.oneVertex.idx,
-                dst.oneVertex.idx, src.logicalEnd, dst.logicalEnd, label)
+  def __init__(self, src, dst, properties):
+    self.src = src
+    self.dst = dst
+    self.id = properties['id']
+    self.executionProperties = properties['executionProperties']
+
+  @property
+  def dot(self):
+    src = self.src
+    dst = self.dst
+    try:
+      src = src.internalSrcFor(self.id)
+    except:
+      pass
+    try:
+      dst = dst.internalDstFor(self.id)
+    except:
+      pass
+    label = '{}<BR/><FONT POINT-SIZE=\'8\'>{}</FONT>'.format(self.id, propertiesToString(self.executionProperties))
+    return '{} -> {} [ltail = {}, lhead = {}, label = <{}>];'.format(src.oneVertex.idx,
+                                                                     dst.oneVertex.idx, src.logicalEnd, dst.logicalEnd,
+                                                                     label)
+
 
 class StageEdge:
-    def __init__(self, src, dst, properties):
-        self.src = src.stageDAG.vertices[properties['externalSrcVertexId']]
-        self.dst = dst.stageDAG.vertices[properties['externalDstVertexId']]
-        self.runtimeEdgeId = properties['runtimeEdgeId']
-        self.executionProperties = properties['executionProperties']
-    @property
-    def dot(self):
-        label = '{}<BR/><FONT POINT-SIZE=\'8\'>{}</FONT>'.format(self.runtimeEdgeId, propertiesToString(self.executionProperties))
-        return '{} -> {} [ltail = {}, lhead = {}, label = <{}>];'.format(self.src.oneVertex.idx,
-                self.dst.oneVertex.idx, self.src.logicalEnd, self.dst.logicalEnd, label)
+  def __init__(self, src, dst, properties):
+    self.src = src.stageDAG.vertices[properties['externalSrcVertexId']]
+    self.dst = dst.stageDAG.vertices[properties['externalDstVertexId']]
+    self.runtimeEdgeId = properties['runtimeEdgeId']
+    self.executionProperties = properties['executionProperties']
+
+  @property
+  def dot(self):
+    label = '{}<BR/><FONT POINT-SIZE=\'8\'>{}</FONT>'.format(self.runtimeEdgeId,
+                                                             propertiesToString(self.executionProperties))
+    return '{} -> {} [ltail = {}, lhead = {}, label = <{}>];'.format(self.src.oneVertex.idx,
+                                                                     self.dst.oneVertex.idx, self.src.logicalEnd,
+                                                                     self.dst.logicalEnd, label)
+
 
 class RuntimeEdge:
-    def __init__(self, src, dst, properties):
-        self.src = src
-        self.dst = dst
-        self.runtimeEdgeId = properties['runtimeEdgeId']
-        self.executionProperties = properties['executionProperties']
-    @property
-    def dot(self):
-        label = '{}<BR/><FONT POINT-SIZE=\'8\'>{}</FONT>'.format(self.runtimeEdgeId, propertiesToString(self.executionProperties))
-        return '{} -> {} [ltail = {}, lhead = {}, label = <{}>];'.format(self.src.oneVertex.idx,
-                self.dst.oneVertex.idx, self.src.logicalEnd, self.dst.logicalEnd, label)
+  def __init__(self, src, dst, properties):
+    self.src = src
+    self.dst = dst
+    self.runtimeEdgeId = properties['runtimeEdgeId']
+    self.executionProperties = properties['executionProperties']
+
+  @property
+  def dot(self):
+    label = '{}<BR/><FONT POINT-SIZE=\'8\'>{}</FONT>'.format(self.runtimeEdgeId,
+                                                             propertiesToString(self.executionProperties))
+    return '{} -> {} [ltail = {}, lhead = {}, label = <{}>];'.format(self.src.oneVertex.idx,
+                                                                     self.dst.oneVertex.idx, self.src.logicalEnd,
+                                                                     self.dst.logicalEnd, label)
+
 
 def jsonToDot(jsonDict):
-    try:
-        dag = DAG(jsonDict['dag'], PlanState(jsonDict['planState']))
-    except:
-        dag = DAG(jsonDict, PlanState.empty())
-    return 'digraph dag {compound=true; nodesep=1.0; forcelabels=true;' + dag.dot + '}'
+  try:
+    dag = DAG(jsonDict['dag'], PlanState(jsonDict['planState']))
+  except:
+    dag = DAG(jsonDict, PlanState.empty())
+  return 'digraph dag {compound=true; nodesep=1.0; forcelabels=true;' + dag.dot + '}'
+
 
 if __name__ == "__main__":
-    print(jsonToDot(json.loads(sys.stdin.read())))
+  print(jsonToDot(json.loads(sys.stdin.read())))
diff --git a/bin/metric-parser.py b/bin/metric-parser.py
index 898c34c..f7ef572 100755
--- a/bin/metric-parser.py
+++ b/bin/metric-parser.py
@@ -17,71 +17,78 @@
 # specific language governing permissions and limitations
 # under the License.
 
-import sys
-import os
 import json
 import numpy as np
+import os
+import sys
+
 
 def main():
-   try:
-       filepath = sys.argv[1]
-   except IndexError:
-       print("Please provide the file path for the metric log file.")
-   else:
-       if not os.path.isfile(filepath):
-           print("File path {} does not exist. Exiting...".format(filepath))
-           sys.exit()
+  try:
+    filepath = sys.argv[1]
+  except IndexError:
+    print("Please provide the file path for the metric log file.")
+  else:
+    if not os.path.isfile(filepath):
+      print("File path {} does not exist. Exiting...".format(filepath))
+      sys.exit()
 
-       metricDictionary = dict()
-       vertexToMetricDict = dict()
-       with open(filepath, 'r') as fp:
-           for line in fp:
-               metricInJson = json.loads(line)
-               metricKey = metricInJson["computationUnitId"]
-               metricDictionary[metricKey] = metricInJson["metricList"]
-               if metricKey.find('Task-vertex-') != -1: # Vertex metric
-                   vertexIdSuffix = metricKey.split('Task-vertex-')[1]
-                   if vertexIdSuffix.find('_') != -1: # physical level metric
-                       vertexId = 'vertex-' + vertexIdSuffix.split('_')[0]
-                       metricDictList = metricDictionary[metricKey]
-                       if isinstance(metricDictList, dict):
-                           metricDictList = [metricDictList]
-                       for metricDict in metricDictList:
-                           for key, value in metricDict.items():
-                               if (key != 'EndTime') & (key != 'StartTime'):
-                                   vertexMetricDict = vertexToMetricDict.get(vertexId, dict())
-                                   vertexMetricDictValueList = vertexMetricDict.get(key, [])
-                                   vertexMetricDictValueList.append(value)
-                                   vertexMetricDict[key] = vertexMetricDictValueList
-                                   vertexToMetricDict[vertexId] = vertexMetricDict
+    metricDictionary = dict()
+    vertexToMetricDict = dict()
+    with open(filepath, 'r') as fp:
+      for line in fp:
+        metricInJson = json.loads(line)
+        metricKey = metricInJson["computationUnitId"]
+        metricDictionary[metricKey] = metricInJson["metricList"]
+        if metricKey.find('Task-vertex-') != -1:  # Vertex metric
+          vertexIdSuffix = metricKey.split('Task-vertex-')[1]
+          if vertexIdSuffix.find('_') != -1:  # physical level metric
+            vertexId = 'vertex-' + vertexIdSuffix.split('_')[0]
+            metricDictList = metricDictionary[metricKey]
+            if isinstance(metricDictList, dict):
+              metricDictList = [metricDictList]
+            for metricDict in metricDictList:
+              for key, value in metricDict.items():
+                if (key != 'EndTime') & (key != 'StartTime'):
+                  vertexMetricDict = vertexToMetricDict.get(vertexId, dict())
+                  vertexMetricDictValueList = vertexMetricDict.get(key, [])
+                  vertexMetricDictValueList.append(value)
+                  vertexMetricDict[key] = vertexMetricDictValueList
+                  vertexToMetricDict[vertexId] = vertexMetricDict
 
-       query_metric = True
-       while(query_metric):
-           user_input = input("1 - View metric for a computation unit, 2 - View metric for all IR vertices, 3 - exit: ")
-           if user_input == "1":
-               computationUnitId = input("Enter computation unit ID: ")
-               for metric in metricDictionary[computationUnitId]:
-                   print(metric)
-           elif user_input == "2":
-               for vertexId, metricDict in sorted(vertexToMetricDict.items()):
-                   print(vertexId)
-                   metricKeys, valuesMin, valuesMedian, valuesMax, valuesMean, valuesSum = ['Metric'], ['Min'], ['Median'], ['Max'], ['Mean'], ['Total']
-                   for metricKey, metricValues in metricDict.items():
-                       metricKeys.append(metricKey)
-                       valuesMin.append(str(np.min(metricValues)))
-                       valuesMedian.append(str(np.median(metricValues)))
-                       valuesMax.append(str(np.max(metricValues)))
-                       valuesMean.append(str(np.mean(metricValues)))
-                       valuesSum.append(str(np.sum(metricValues)))
-                   padding = 1
-                   widthKey, widthMin, widthMedian, widthMax, widthMean, widthSum = map(lambda x:len(max(x, key=len)) + padding, [metricKeys, valuesMin, valuesMedian, valuesMax, valuesMean, valuesSum])
-                   templete = '{:<%s} {:<%s} {:<%s} {:<%s} {:<%s} {:<%s}' % (widthKey, widthMin, widthMedian, widthMax, widthMean, widthSum)
-                   for metricKey, valueMin, valueMedian, valueMax, valueMean, valueSum in zip(metricKeys, valuesMin, valuesMedian, valuesMax, valuesMean, valuesSum):
-                    print(templete.format(metricKey, valueMin, valueMedian, valueMax, valueMean, valueSum))
-           else:
-               print ("Exiting metric parser")
-               query_metric = False
+    query_metric = True
+    while (query_metric):
+      user_input = input("1 - View metric for a computation unit, 2 - View metric for all IR vertices, 3 - exit: ")
+      if user_input == "1":
+        computationUnitId = input("Enter computation unit ID: ")
+        for metric in metricDictionary[computationUnitId]:
+          print(metric)
+      elif user_input == "2":
+        for vertexId, metricDict in sorted(vertexToMetricDict.items()):
+          print(vertexId)
+          metricKeys, valuesMin, valuesMedian, valuesMax, valuesMean, valuesSum = ['Metric'], ['Min'], ['Median'], [
+            'Max'], ['Mean'], ['Total']
+          for metricKey, metricValues in metricDict.items():
+            metricKeys.append(metricKey)
+            valuesMin.append(str(np.min(metricValues)))
+            valuesMedian.append(str(np.median(metricValues)))
+            valuesMax.append(str(np.max(metricValues)))
+            valuesMean.append(str(np.mean(metricValues)))
+            valuesSum.append(str(np.sum(metricValues)))
+          padding = 1
+          widthKey, widthMin, widthMedian, widthMax, widthMean, widthSum = map(lambda x: len(max(x, key=len)) + padding,
+                                                                               [metricKeys, valuesMin, valuesMedian,
+                                                                                valuesMax, valuesMean, valuesSum])
+          templete = '{:<%s} {:<%s} {:<%s} {:<%s} {:<%s} {:<%s}' % (
+            widthKey, widthMin, widthMedian, widthMax, widthMean, widthSum)
+          for metricKey, valueMin, valueMedian, valueMax, valueMean, valueSum in zip(metricKeys, valuesMin,
+                                                                                     valuesMedian, valuesMax,
+                                                                                     valuesMean, valuesSum):
+            print(templete.format(metricKey, valueMin, valueMedian, valueMax, valueMean, valueSum))
+      else:
+        print("Exiting metric parser")
+        query_metric = False
 
 
 if __name__ == '__main__':
-   main()
+  main()
diff --git a/checkstyle.xml b/checkstyle.xml
index c7f8761..7f97abc 100644
--- a/checkstyle.xml
+++ b/checkstyle.xml
@@ -18,8 +18,8 @@
 under the License.
 -->
 <!DOCTYPE module PUBLIC
-        "-//Puppy Crawl//DTD Check Configuration 1.3//EN"
-        "http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
+  "-//Puppy Crawl//DTD Check Configuration 1.3//EN"
+  "http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
 
 <!--
   Checkstyle configuration that checks the sun coding conventions from:
@@ -38,148 +38,148 @@
 -->
 
 <module name="Checker">
-    <!--
-        If you set the basedir property below, then all reported file
-        names will be relative to the specified directory. See
-        http://checkstyle.sourceforge.net/5.x/config.html#Checker
-        <property name="basedir" value="${basedir}"/>
-    -->
-    <property name="charset" value="UTF-8"/>
+  <!--
+      If you set the basedir property below, then all reported file
+      names will be relative to the specified directory. See
+      http://checkstyle.sourceforge.net/5.x/config.html#Checker
+      <property name="basedir" value="${basedir}"/>
+  -->
+  <property name="charset" value="UTF-8"/>
 
-    <property name="fileExtensions" value="java, properties, xml"/>
+  <property name="fileExtensions" value="java, properties, xml"/>
 
-    <!-- Checks that a package-info.java file exists for each package.     -->
-    <!-- See http://checkstyle.sf.net/config_javadoc.html#JavadocPackage -->
-    <!--<module name="JavadocPackage"/>-->
+  <!-- Checks that a package-info.java file exists for each package.     -->
+  <!-- See http://checkstyle.sf.net/config_javadoc.html#JavadocPackage -->
+  <!--<module name="JavadocPackage"/>-->
 
-    <!-- Checks whether files end with a new line.                        -->
-    <!-- See http://checkstyle.sf.net/config_misc.html#NewlineAtEndOfFile -->
-    <module name="NewlineAtEndOfFile"/>
+  <!-- Checks whether files end with a new line.                        -->
+  <!-- See http://checkstyle.sf.net/config_misc.html#NewlineAtEndOfFile -->
+  <module name="NewlineAtEndOfFile"/>
 
-    <!-- Checks that property files contain the same keys.         -->
-    <!-- See http://checkstyle.sf.net/config_misc.html#Translation -->
-    <module name="Translation"/>
+  <!-- Checks that property files contain the same keys.         -->
+  <!-- See http://checkstyle.sf.net/config_misc.html#Translation -->
+  <module name="Translation"/>
+
+  <!-- Checks for Size Violations.                    -->
+  <!-- See http://checkstyle.sf.net/config_sizes.html -->
+  <module name="FileLength"/>
+
+  <!-- Checks for whitespace                               -->
+  <!-- See http://checkstyle.sf.net/config_whitespace.html -->
+  <module name="FileTabCharacter"/>
+
+  <!-- Miscellaneous other checks.                   -->
+  <!-- See http://checkstyle.sf.net/config_misc.html -->
+  <module name="RegexpSingleline">
+    <property name="format" value="\s+$"/>
+    <property name="minimum" value="0"/>
+    <property name="maximum" value="0"/>
+    <property name="message" value="Line has trailing spaces."/>
+  </module>
+
+  <module name="SuppressWarningsFilter"/>
+
+  <module name="TreeWalker">
+
+    <!-- Checks for Javadoc comments.                     -->
+    <!-- See http://checkstyle.sf.net/config_javadoc.html -->
+    <module name="JavadocMethod">
+      <property name="severity" value="warning"/>
+    </module>
+    <module name="JavadocType"/>
+    <!--<module name="JavadocVariable"/>-->
+    <module name="JavadocStyle"/>
+
+    <!-- Checks for Naming Conventions.                  -->
+    <!-- See http://checkstyle.sf.net/config_naming.html -->
+    <module name="ConstantName"/>
+    <module name="LocalFinalVariableName"/>
+    <module name="LocalVariableName"/>
+    <module name="MemberName"/>
+    <module name="MethodName"/>
+    <module name="PackageName"/>
+    <module name="ParameterName"/>
+    <module name="StaticVariableName"/>
+    <module name="TypeName"/>
+
+    <!-- Checks for imports                              -->
+    <!-- See http://checkstyle.sf.net/config_import.html -->
+    <!--<module name="AvoidStarImport"/>-->
+    <module name="IllegalImport"/> <!-- defaults to sun.* packages -->
+    <module name="RedundantImport"/>
+    <module name="UnusedImports">
+      <property name="processJavadoc" value="false"/>
+    </module>
 
     <!-- Checks for Size Violations.                    -->
     <!-- See http://checkstyle.sf.net/config_sizes.html -->
-    <module name="FileLength"/>
+    <module name="LineLength">
+      <property name="max" value="120"/>
+      <property name="ignorePattern" value="^package.*|^import.*|a href|href|http://|https://|ftp://"/>
+    </module>
+    <module name="MethodLength"/>
 
     <!-- Checks for whitespace                               -->
     <!-- See http://checkstyle.sf.net/config_whitespace.html -->
-    <module name="FileTabCharacter"/>
+    <module name="EmptyForIteratorPad"/>
+    <module name="GenericWhitespace"/>
+    <module name="MethodParamPad"/>
+    <module name="NoWhitespaceAfter"/>
+    <module name="NoWhitespaceBefore"/>
+    <module name="OperatorWrap"/>
+    <module name="ParenPad"/>
+    <module name="TypecastParenPad"/>
+    <module name="WhitespaceAfter"/>
+    <module name="WhitespaceAround"/>
+
+    <!-- Modifier Checks                                    -->
+    <!-- See http://checkstyle.sf.net/config_modifiers.html -->
+    <module name="ModifierOrder"/>
+    <module name="RedundantModifier"/>
+
+    <!-- Checks for blocks. You know, those {}'s         -->
+    <!-- See http://checkstyle.sf.net/config_blocks.html -->
+    <module name="AvoidNestedBlocks"/>
+    <module name="EmptyBlock"/>
+    <module name="LeftCurly"/>
+    <module name="NeedBraces"/>
+    <module name="RightCurly"/>
+
+    <!-- Checks for common coding problems               -->
+    <!-- See http://checkstyle.sf.net/config_coding.html -->
+    <!--<module name="AvoidInlineConditionals"/>-->
+    <module name="EmptyStatement"/>
+    <module name="EqualsHashCode"/>
+    <module name="HiddenField">
+      <property name="ignoreSetter" value="true"/>
+      <property name="ignoreConstructorParameter" value="true"/>
+    </module>
+    <module name="IllegalInstantiation"/>
+    <module name="InnerAssignment"/>
+    <!--<module name="MagicNumber"/>-->
+    <module name="MissingSwitchDefault"/>
+    <module name="SimplifyBooleanExpression"/>
+    <module name="SimplifyBooleanReturn"/>
+
+    <!-- Checks for class design                         -->
+    <!-- See http://checkstyle.sf.net/config_design.html -->
+    <module name="DesignForExtension"/>
+    <module name="FinalClass"/>
+    <module name="HideUtilityClassConstructor"/>
+    <module name="InterfaceIsType"/>
+    <module name="VisibilityModifier"/>
 
     <!-- Miscellaneous other checks.                   -->
     <!-- See http://checkstyle.sf.net/config_misc.html -->
-    <module name="RegexpSingleline">
-        <property name="format" value="\s+$"/>
-        <property name="minimum" value="0"/>
-        <property name="maximum" value="0"/>
-        <property name="message" value="Line has trailing spaces."/>
-    </module>
+    <module name="ArrayTypeStyle"/>
+    <module name="FinalParameters"/>
+    <module name="TodoComment"/>
+    <module name="UpperEll"/>
 
-    <module name="SuppressWarningsFilter" />
+    <module name="SuppressWarningsHolder"/>
+  </module>
 
-    <module name="TreeWalker">
-
-        <!-- Checks for Javadoc comments.                     -->
-        <!-- See http://checkstyle.sf.net/config_javadoc.html -->
-        <module name="JavadocMethod">
-            <property name="severity" value="warning"/>
-        </module>
-        <module name="JavadocType"/>
-        <!--<module name="JavadocVariable"/>-->
-        <module name="JavadocStyle"/>
-
-        <!-- Checks for Naming Conventions.                  -->
-        <!-- See http://checkstyle.sf.net/config_naming.html -->
-        <module name="ConstantName"/>
-        <module name="LocalFinalVariableName"/>
-        <module name="LocalVariableName"/>
-        <module name="MemberName"/>
-        <module name="MethodName"/>
-        <module name="PackageName"/>
-        <module name="ParameterName"/>
-        <module name="StaticVariableName"/>
-        <module name="TypeName"/>
-
-        <!-- Checks for imports                              -->
-        <!-- See http://checkstyle.sf.net/config_import.html -->
-        <!--<module name="AvoidStarImport"/>-->
-        <module name="IllegalImport"/> <!-- defaults to sun.* packages -->
-        <module name="RedundantImport"/>
-        <module name="UnusedImports">
-            <property name="processJavadoc" value="false"/>
-        </module>
-
-        <!-- Checks for Size Violations.                    -->
-        <!-- See http://checkstyle.sf.net/config_sizes.html -->
-        <module name="LineLength">
-            <property name="max" value="120"/>
-            <property name="ignorePattern" value="^package.*|^import.*|a href|href|http://|https://|ftp://"/>
-        </module>
-        <module name="MethodLength"/>
-
-        <!-- Checks for whitespace                               -->
-        <!-- See http://checkstyle.sf.net/config_whitespace.html -->
-        <module name="EmptyForIteratorPad"/>
-        <module name="GenericWhitespace"/>
-        <module name="MethodParamPad"/>
-        <module name="NoWhitespaceAfter"/>
-        <module name="NoWhitespaceBefore"/>
-        <module name="OperatorWrap"/>
-        <module name="ParenPad"/>
-        <module name="TypecastParenPad"/>
-        <module name="WhitespaceAfter"/>
-        <module name="WhitespaceAround"/>
-
-        <!-- Modifier Checks                                    -->
-        <!-- See http://checkstyle.sf.net/config_modifiers.html -->
-        <module name="ModifierOrder"/>
-        <module name="RedundantModifier"/>
-
-        <!-- Checks for blocks. You know, those {}'s         -->
-        <!-- See http://checkstyle.sf.net/config_blocks.html -->
-        <module name="AvoidNestedBlocks"/>
-        <module name="EmptyBlock"/>
-        <module name="LeftCurly"/>
-        <module name="NeedBraces"/>
-        <module name="RightCurly"/>
-
-        <!-- Checks for common coding problems               -->
-        <!-- See http://checkstyle.sf.net/config_coding.html -->
-        <!--<module name="AvoidInlineConditionals"/>-->
-        <module name="EmptyStatement"/>
-        <module name="EqualsHashCode"/>
-        <module name="HiddenField">
-            <property name="ignoreSetter" value="true" />
-            <property name="ignoreConstructorParameter" value="true" />
-        </module>
-        <module name="IllegalInstantiation"/>
-        <module name="InnerAssignment"/>
-        <!--<module name="MagicNumber"/>-->
-        <module name="MissingSwitchDefault"/>
-        <module name="SimplifyBooleanExpression"/>
-        <module name="SimplifyBooleanReturn"/>
-
-        <!-- Checks for class design                         -->
-        <!-- See http://checkstyle.sf.net/config_design.html -->
-        <module name="DesignForExtension"/>
-        <module name="FinalClass"/>
-        <module name="HideUtilityClassConstructor"/>
-        <module name="InterfaceIsType"/>
-        <module name="VisibilityModifier"/>
-
-        <!-- Miscellaneous other checks.                   -->
-        <!-- See http://checkstyle.sf.net/config_misc.html -->
-        <module name="ArrayTypeStyle"/>
-        <module name="FinalParameters"/>
-        <module name="TodoComment"/>
-        <module name="UpperEll"/>
-
-        <module name="SuppressWarningsHolder" />
-    </module>
-
-    <module name="RegexpHeader">
-      <property name="headerFile" value="${checkstyle.header.file}" />
-    </module>
+  <module name="RegexpHeader">
+    <property name="headerFile" value="${checkstyle.header.file}"/>
+  </module>
 </module>
diff --git a/client/pom.xml b/client/pom.xml
index c9439d2..367171b 100644
--- a/client/pom.xml
+++ b/client/pom.xml
@@ -17,60 +17,61 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-project</artifactId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../</relativePath>
-    </parent>
+  <parent>
+    <groupId>org.apache.nemo</groupId>
+    <artifactId>nemo-project</artifactId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../</relativePath>
+  </parent>
 
-    <artifactId>nemo-client</artifactId>
-    <name>Nemo Client</name>
+  <artifactId>nemo-client</artifactId>
+  <name>Nemo Client</name>
 
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-driver</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-conf</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.reef</groupId>
-            <artifactId>reef-runtime-local</artifactId>
-            <version>${reef.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.reef</groupId>
-            <artifactId>reef-runtime-yarn</artifactId>
-            <version>${reef.version}</version>
-        </dependency>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-driver</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-conf</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.reef</groupId>
+      <artifactId>reef-runtime-local</artifactId>
+      <version>${reef.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.reef</groupId>
+      <artifactId>reef-runtime-yarn</artifactId>
+      <version>${reef.version}</version>
+    </dependency>
 
 
-      <!-- for nemo-beam-runner -->
-      <dependency>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-compiler-frontend-beam</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>com.google.auto.service</groupId>
-        <artifactId>auto-service</artifactId>
-        <version>${auto-service.version}</version>
-        <optional>true</optional>
-      </dependency>
-    </dependencies>
+    <!-- for nemo-beam-runner -->
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-compiler-frontend-beam</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.auto.service</groupId>
+      <artifactId>auto-service</artifactId>
+      <version>${auto-service.version}</version>
+      <optional>true</optional>
+    </dependency>
+  </dependencies>
 
 
   <build>
@@ -100,7 +101,7 @@
               <transformers>
                 <!-- Required for using beam-hadoop: See https://stackoverflow.com/questions/44365545
                 -->
-                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer" />
+                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"/>
               </transformers>
             </configuration>
           </execution>
diff --git a/client/src/main/java/org/apache/nemo/client/ClientEndpoint.java b/client/src/main/java/org/apache/nemo/client/ClientEndpoint.java
index e29b351..257d23a 100644
--- a/client/src/main/java/org/apache/nemo/client/ClientEndpoint.java
+++ b/client/src/main/java/org/apache/nemo/client/ClientEndpoint.java
@@ -50,6 +50,7 @@
 
   /**
    * Constructor.
+   *
    * @param stateTranslator translator to translate between the state of plan and corresponding.
    */
   public ClientEndpoint(final StateTranslator stateTranslator) {
@@ -146,7 +147,7 @@
    * @return the final state of this job.
    */
   public final Enum waitUntilJobFinish(final long timeout,
-                                          final TimeUnit unit) {
+                                       final TimeUnit unit) {
     if (driverEndpoint.get() != null) {
       return stateTranslator.translateState(driverEndpoint.get().waitUntilFinish(timeout, unit));
     } else {
@@ -162,7 +163,7 @@
       if (driverIsConnected) {
         final long consumedTime = System.nanoTime() - currentNano;
         return stateTranslator.translateState(driverEndpoint.get().
-            waitUntilFinish(timeout - unit.convert(consumedTime, TimeUnit.NANOSECONDS), unit));
+          waitUntilFinish(timeout - unit.convert(consumedTime, TimeUnit.NANOSECONDS), unit));
       } else {
         return PlanState.State.READY;
       }
diff --git a/client/src/main/java/org/apache/nemo/client/DriverEndpoint.java b/client/src/main/java/org/apache/nemo/client/DriverEndpoint.java
index 2b52a2c..6deb8e6 100644
--- a/client/src/main/java/org/apache/nemo/client/DriverEndpoint.java
+++ b/client/src/main/java/org/apache/nemo/client/DriverEndpoint.java
@@ -41,8 +41,9 @@
   /**
    * Construct an endpoint in driver side.
    * This method will be called by {@link ClientEndpoint}.
+   *
    * @param planStateManager of running plan.
-   * @param clientEndpoint of running plan.
+   * @param clientEndpoint   of running plan.
    */
   public DriverEndpoint(final PlanStateManager planStateManager,
                         final ClientEndpoint clientEndpoint) {
@@ -54,6 +55,7 @@
   /**
    * Get the current state of the running plan.
    * This method will be called by {@link ClientEndpoint}.
+   *
    * @return the current state of the running plan.
    */
   PlanState.State getState() {
@@ -64,8 +66,9 @@
    * Wait for this plan to be finished and return the final state.
    * It wait for at most the given time.
    * This method will be called by {@link ClientEndpoint}.
+   *
    * @param timeout of waiting.
-   * @param unit of the timeout.
+   * @param unit    of the timeout.
    * @return the final state of this plan.
    */
   PlanState.State waitUntilFinish(final long timeout,
@@ -76,6 +79,7 @@
   /**
    * Wait for this plan to be finished and return the final state.
    * This method will be called by {@link ClientEndpoint}.
+   *
    * @return the final state of this plan.
    */
   PlanState.State waitUntilFinish() {
diff --git a/client/src/main/java/org/apache/nemo/client/DriverRPCServer.java b/client/src/main/java/org/apache/nemo/client/DriverRPCServer.java
index 29db74c..5d7b998 100644
--- a/client/src/main/java/org/apache/nemo/client/DriverRPCServer.java
+++ b/client/src/main/java/org/apache/nemo/client/DriverRPCServer.java
@@ -48,7 +48,7 @@
 @NotThreadSafe
 public final class DriverRPCServer {
   private final Map<ControlMessage.DriverToClientMessageType, EventHandler<ControlMessage.DriverToClientMessage>>
-      handlers = new HashMap<>();
+    handlers = new HashMap<>();
   private boolean isRunning = false;
   private boolean isShutdown = false;
   private Transport transport;
@@ -59,7 +59,8 @@
 
   /**
    * Registers handler for the given type of message.
-   * @param type the type of message
+   *
+   * @param type    the type of message
    * @param handler handler implementation
    * @return {@code this}
    */
@@ -88,7 +89,7 @@
       injector.bindVolatileParameter(RemoteConfiguration.HostAddress.class, host);
       injector.bindVolatileParameter(RemoteConfiguration.Port.class, 0);
       injector.bindVolatileParameter(RemoteConfiguration.RemoteServerStage.class,
-          new SyncStage<>(new ServerEventHandler()));
+        new SyncStage<>(new ServerEventHandler()));
       transport = injector.getInstance(NettyMessagingTransport.class);
       LOG.info("DriverRPCServer running at {}", transport.getListeningPort());
       isRunning = true;
@@ -120,13 +121,14 @@
    */
   public Configuration getListeningConfiguration() {
     return Tang.Factory.getTang().newConfigurationBuilder()
-        .bindNamedParameter(JobConf.ClientSideRPCServerHost.class, getListeningHost())
-        .bindNamedParameter(JobConf.ClientSideRPCServerPort.class, String.valueOf(getListeningPort()))
-        .build();
+      .bindNamedParameter(JobConf.ClientSideRPCServerHost.class, getListeningHost())
+      .bindNamedParameter(JobConf.ClientSideRPCServerPort.class, String.valueOf(getListeningPort()))
+      .build();
   }
 
   /**
    * Sends a message to driver.
+   *
    * @param message message to send
    */
   public void send(final ControlMessage.ClientToDriverMessage message) {
@@ -186,6 +188,7 @@
 
   /**
    * Throws a {@link RuntimeException} if the server is shut down, or it has different state than the expected state.
+   *
    * @param running the expected state of the server
    */
   private void ensureServerState(final boolean running) {
diff --git a/client/src/main/java/org/apache/nemo/client/JobLauncher.java b/client/src/main/java/org/apache/nemo/client/JobLauncher.java
index ba6194b..233ce73 100644
--- a/client/src/main/java/org/apache/nemo/client/JobLauncher.java
+++ b/client/src/main/java/org/apache/nemo/client/JobLauncher.java
@@ -20,6 +20,7 @@
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.ByteString;
+import org.apache.commons.lang3.SerializationUtils;
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.compiler.backend.nemo.NemoPlanRewriter;
 import org.apache.nemo.conf.JobConf;
@@ -27,7 +28,6 @@
 import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.runtime.common.message.MessageEnvironment;
 import org.apache.nemo.runtime.common.message.MessageParameters;
-import org.apache.commons.lang3.SerializationUtils;
 import org.apache.nemo.runtime.common.plan.PlanRewriter;
 import org.apache.nemo.runtime.master.scheduler.Scheduler;
 import org.apache.reef.client.DriverConfiguration;
@@ -65,12 +65,12 @@
 
   static {
     System.out.println(
-        "\nPowered by\n"
-          + "    _   __                   \n"
-          + "   / | / /__  ____ ___  ____ \n"
-          + "  /  |/ / _ \\/ __ `__ \\/ __ \\\n"
-          + " / /|  /  __/ / / / / / /_/ /\n"
-          + "/_/ |_/\\___/_/ /_/ /_/\\____/ \n"
+      "\nPowered by\n"
+        + "    _   __                   \n"
+        + "   / | / /__  ____ ___  ____ \n"
+        + "  /  |/ / _ \\/ __ `__ \\/ __ \\\n"
+        + " / /|  /  __/ / / / / / /_/ /\n"
+        + "/_/ |_/\\___/_/ /_/ /_/\\____/ \n"
     );
   }
 
@@ -114,10 +114,11 @@
 
   /**
    * Set up the driver, etc. before the actual execution.
+   *
    * @param args arguments.
-   * @throws InjectionException injection exception from REEF.
+   * @throws InjectionException     injection exception from REEF.
    * @throws ClassNotFoundException class not found exception.
-   * @throws IOException IO exception.
+   * @throws IOException            IO exception.
    */
   public static void setup(final String[] args) throws InjectionException, ClassNotFoundException, IOException {
     // Get Job and Driver Confs
@@ -213,7 +214,7 @@
   }
 
   /**
-   * @param dag the application DAG.
+   * @param dag   the application DAG.
    * @param jobId job ID.
    */
   public static void launchDAG(final IRDAG dag, final String jobId) {
@@ -221,9 +222,9 @@
   }
 
   /**
-   * @param dag the application DAG.
+   * @param dag                the application DAG.
    * @param broadcastVariables broadcast variables (can be empty).
-   * @param jobId job ID.
+   * @param jobId              job ID.
    */
   public static void launchDAG(final IRDAG dag,
                                final Map<Serializable, Object> broadcastVariables,
@@ -251,12 +252,12 @@
     serializedDAG = Base64.getEncoder().encodeToString(SerializationUtils.serialize(dag));
     jobDoneLatch = new CountDownLatch(1);
     driverRPCServer.send(ControlMessage.ClientToDriverMessage.newBuilder()
-        .setType(ControlMessage.ClientToDriverMessageType.LaunchDAG)
-        .setLaunchDAG(ControlMessage.LaunchDAGMessage.newBuilder()
-            .setDag(serializedDAG)
-            .setBroadcastVars(ByteString.copyFrom(SerializationUtils.serialize((Serializable) broadcastVariables)))
-            .build())
-        .build());
+      .setType(ControlMessage.ClientToDriverMessageType.LaunchDAG)
+      .setLaunchDAG(ControlMessage.LaunchDAGMessage.newBuilder()
+        .setDag(serializedDAG)
+        .setBroadcastVars(ByteString.copyFrom(SerializationUtils.serialize((Serializable) broadcastVariables)))
+        .build())
+      .build());
 
     // Wait for the ExecutionDone message from the driver
     try {
@@ -310,10 +311,11 @@
 
   /**
    * Fetch scheduler configuration.
+   *
    * @param jobConf job configuration.
    * @return the scheduler configuration.
    * @throws ClassNotFoundException exception while finding the class.
-   * @throws InjectionException exception while injection (REEF Tang).
+   * @throws InjectionException     exception while injection (REEF Tang).
    */
   private static Configuration getSchedulerConf(final Configuration jobConf)
     throws ClassNotFoundException, InjectionException {
@@ -334,10 +336,10 @@
    */
   private static Configuration getDriverNcsConf() throws InjectionException {
     return Configurations.merge(NameServerConfiguration.CONF.build(),
-        LocalNameResolverConfiguration.CONF.build(),
-        TANG.newConfigurationBuilder()
-            .bindImplementation(IdentifierFactory.class, StringIdentifierFactory.class)
-            .build());
+      LocalNameResolverConfiguration.CONF.build(),
+      TANG.newConfigurationBuilder()
+        .bindImplementation(IdentifierFactory.class, StringIdentifierFactory.class)
+        .build());
   }
 
   /**
@@ -348,8 +350,8 @@
    */
   private static Configuration getDriverMessageConf() throws InjectionException {
     return TANG.newConfigurationBuilder()
-        .bindNamedParameter(MessageParameters.SenderId.class, MessageEnvironment.MASTER_COMMUNICATION_ID)
-        .build();
+      .bindNamedParameter(MessageParameters.SenderId.class, MessageEnvironment.MASTER_COMMUNICATION_ID)
+      .build();
   }
 
   /**
@@ -364,16 +366,16 @@
     final String jobId = injector.getNamedInstance(JobConf.JobId.class);
     final int driverMemory = injector.getNamedInstance(JobConf.DriverMemMb.class);
     return DriverConfiguration.CONF
-        .set(DriverConfiguration.GLOBAL_LIBRARIES, EnvironmentUtils.getClassLocation(NemoDriver.class))
-        .set(DriverConfiguration.ON_DRIVER_STARTED, NemoDriver.StartHandler.class)
-        .set(DriverConfiguration.ON_EVALUATOR_ALLOCATED, NemoDriver.AllocatedEvaluatorHandler.class)
-        .set(DriverConfiguration.ON_CONTEXT_ACTIVE, NemoDriver.ActiveContextHandler.class)
-        .set(DriverConfiguration.ON_EVALUATOR_FAILED, NemoDriver.FailedEvaluatorHandler.class)
-        .set(DriverConfiguration.ON_CONTEXT_FAILED, NemoDriver.FailedContextHandler.class)
-        .set(DriverConfiguration.ON_DRIVER_STOP, NemoDriver.DriverStopHandler.class)
-        .set(DriverConfiguration.DRIVER_IDENTIFIER, jobId)
-        .set(DriverConfiguration.DRIVER_MEMORY, driverMemory)
-        .build();
+      .set(DriverConfiguration.GLOBAL_LIBRARIES, EnvironmentUtils.getClassLocation(NemoDriver.class))
+      .set(DriverConfiguration.ON_DRIVER_STARTED, NemoDriver.StartHandler.class)
+      .set(DriverConfiguration.ON_EVALUATOR_ALLOCATED, NemoDriver.AllocatedEvaluatorHandler.class)
+      .set(DriverConfiguration.ON_CONTEXT_ACTIVE, NemoDriver.ActiveContextHandler.class)
+      .set(DriverConfiguration.ON_EVALUATOR_FAILED, NemoDriver.FailedEvaluatorHandler.class)
+      .set(DriverConfiguration.ON_CONTEXT_FAILED, NemoDriver.FailedContextHandler.class)
+      .set(DriverConfiguration.ON_DRIVER_STOP, NemoDriver.DriverStopHandler.class)
+      .set(DriverConfiguration.DRIVER_IDENTIFIER, jobId)
+      .set(DriverConfiguration.DRIVER_MEMORY, driverMemory)
+      .build();
   }
 
   /**
@@ -426,12 +428,12 @@
     switch (deployMode) {
       case "local":
         return LocalRuntimeConfiguration.CONF
-            .set(LocalRuntimeConfiguration.MAX_NUMBER_OF_EVALUATORS, LOCAL_NUMBER_OF_EVALUATORS)
-            .build();
+          .set(LocalRuntimeConfiguration.MAX_NUMBER_OF_EVALUATORS, LOCAL_NUMBER_OF_EVALUATORS)
+          .build();
       case "yarn":
         return YarnClientConfiguration.CONF
-            .set(YarnClientConfiguration.JVM_HEAP_SLACK, injector.getNamedInstance(JobConf.JVMHeapSlack.class))
-            .build();
+          .set(YarnClientConfiguration.JVM_HEAP_SLACK, injector.getNamedInstance(JobConf.JVMHeapSlack.class))
+          .build();
       default:
         throw new UnsupportedOperationException(deployMode);
     }
@@ -451,15 +453,15 @@
                                            final Class<? extends Name<String>> pathParameter,
                                            final Class<? extends Name<String>> contentsParameter,
                                            final String defaultContent)
-      throws InjectionException {
+    throws InjectionException {
     final Injector injector = TANG.newInjector(jobConf);
     try {
       final String path = injector.getNamedInstance(pathParameter);
       final String contents = path.isEmpty() ? defaultContent
         : new String(Files.readAllBytes(Paths.get(path)), StandardCharsets.UTF_8);
       return TANG.newConfigurationBuilder()
-          .bindNamedParameter(contentsParameter, contents)
-          .build();
+        .bindNamedParameter(contentsParameter, contents)
+        .build();
     } catch (final IOException e) {
       throw new RuntimeException(e);
     }
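
A quick orientation on the client flow that the `JobLauncher` hunks above reformat: `setup(...)` wires the job and driver configurations and starts the driver RPC server, while `launchDAG(...)` Base64-serializes the DAG, ships it to the driver as a `LaunchDAG` message, and blocks on `jobDoneLatch` until execution finishes. A minimal sketch, assuming the application builds its own `IRDAG` (the `buildApplicationDag()` helper below is hypothetical):

```java
import org.apache.nemo.client.JobLauncher;
import org.apache.nemo.common.ir.IRDAG;

import java.io.Serializable;
import java.util.Collections;
import java.util.Map;

public final class LaunchExample {
  public static void main(final String[] args) throws Exception {
    // Parse job/driver configurations and set up the driver-side RPC endpoint.
    JobLauncher.setup(args);

    final IRDAG dag = buildApplicationDag(); // hypothetical, application-specific
    final Map<Serializable, Object> broadcastVariables = Collections.emptyMap(); // "can be empty"

    // Serializes the DAG, sends ClientToDriverMessageType.LaunchDAG,
    // and waits for the driver's ExecutionDone message.
    JobLauncher.launchDAG(dag, broadcastVariables, "example-job");
  }

  private static IRDAG buildApplicationDag() {
    throw new UnsupportedOperationException("application-specific DAG construction");
  }
}
```
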
diff --git a/client/src/main/java/org/apache/nemo/client/beam/NemoPipelineResult.java b/client/src/main/java/org/apache/nemo/client/beam/NemoPipelineResult.java
index 512f8f1..d983521 100644
--- a/client/src/main/java/org/apache/nemo/client/beam/NemoPipelineResult.java
+++ b/client/src/main/java/org/apache/nemo/client/beam/NemoPipelineResult.java
@@ -18,9 +18,9 @@
  */
 package org.apache.nemo.client.beam;
 
-import org.apache.nemo.client.ClientEndpoint;
 import org.apache.beam.sdk.PipelineResult;
 import org.apache.beam.sdk.metrics.MetricResults;
+import org.apache.nemo.client.ClientEndpoint;
 import org.joda.time.Duration;
 
 import java.io.IOException;
diff --git a/client/src/main/java/org/apache/nemo/client/beam/NemoRunner.java b/client/src/main/java/org/apache/nemo/client/beam/NemoRunner.java
index ccafc3e..81aa18d 100644
--- a/client/src/main/java/org/apache/nemo/client/beam/NemoRunner.java
+++ b/client/src/main/java/org/apache/nemo/client/beam/NemoRunner.java
@@ -18,12 +18,12 @@
  */
 package org.apache.nemo.client.beam;
 
-import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.nemo.client.JobLauncher;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.PipelineRunner;
 import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.PipelineOptionsValidator;
+import org.apache.nemo.client.JobLauncher;
 import org.apache.nemo.compiler.frontend.beam.NemoPipelineOptions;
 import org.apache.nemo.compiler.frontend.beam.PipelineVisitor;
 
@@ -35,6 +35,7 @@
 
   /**
    * BEAM Pipeline Runner.
+   *
    * @param nemoPipelineOptions PipelineOptions.
    */
   private NemoRunner(final NemoPipelineOptions nemoPipelineOptions) {
@@ -64,6 +65,7 @@
 
   /**
    * Static initializer for creating PipelineRunner with the given options.
+   *
    * @param options given PipelineOptions.
    * @return The created PipelineRunner.
    */
@@ -74,6 +76,7 @@
 
   /**
    * Method to run the Pipeline.
+   *
    * @param pipeline the Pipeline to run.
    * @return The result of the pipeline.
    */
diff --git a/client/src/main/java/org/apache/nemo/client/beam/NemoRunnerRegistrar.java b/client/src/main/java/org/apache/nemo/client/beam/NemoRunnerRegistrar.java
index 832bcd6..a6b7f84 100644
--- a/client/src/main/java/org/apache/nemo/client/beam/NemoRunnerRegistrar.java
+++ b/client/src/main/java/org/apache/nemo/client/beam/NemoRunnerRegistrar.java
@@ -28,7 +28,7 @@
 
 /**
  * Contains the {@link PipelineRunnerRegistrar} and {@link PipelineOptionsRegistrar} for the {@link NemoRunner}.
- *
+ * <p>
  * {@link AutoService} will register Nemo's implementations of the {@link PipelineRunner} and {@link PipelineOptions}
  * as available pipeline runner services.
  */
diff --git a/client/src/main/java/org/apache/reef/runtime/yarn/ClassPathBuilder.java b/client/src/main/java/org/apache/reef/runtime/yarn/ClassPathBuilder.java
index 9b53d8c..86682f7 100644
--- a/client/src/main/java/org/apache/reef/runtime/yarn/ClassPathBuilder.java
+++ b/client/src/main/java/org/apache/reef/runtime/yarn/ClassPathBuilder.java
@@ -45,8 +45,8 @@
    */
   private static boolean couldBeYarnConfigurationPath(final String path) {
     return path.contains("conf") ||
-            path.contains("etc") ||
-            path.contains(HadoopEnvironment.HADOOP_CONF_DIR);
+      path.contains("etc") ||
+      path.contains(HadoopEnvironment.HADOOP_CONF_DIR);
   }
 
   /**
diff --git a/client/src/test/java/org/apache/nemo/client/ClientDriverRPCTest.java b/client/src/test/java/org/apache/nemo/client/ClientDriverRPCTest.java
index ef2939f..24522a8 100644
--- a/client/src/test/java/org/apache/nemo/client/ClientDriverRPCTest.java
+++ b/client/src/test/java/org/apache/nemo/client/ClientDriverRPCTest.java
@@ -18,8 +18,8 @@
  */
 package org.apache.nemo.client;
 
-import org.apache.nemo.runtime.master.ClientRPC;
 import org.apache.nemo.runtime.common.comm.ControlMessage;
+import org.apache.nemo.runtime.master.ClientRPC;
 import org.apache.reef.tang.Injector;
 import org.apache.reef.tang.Tang;
 import org.apache.reef.tang.exceptions.InjectionException;
@@ -35,6 +35,7 @@
 public final class ClientDriverRPCTest {
   private DriverRPCServer driverRPCServer;
   private ClientRPC clientRPC;
+
   @Before
   public void setupDriverRPCServer() {
     // Initialize DriverRPCServer.
@@ -55,6 +56,7 @@
 
   /**
    * Test with empty set of handlers.
+   *
    * @throws InjectionException on Exceptions on creating {@link ClientRPC}.
    */
   @Test
@@ -64,37 +66,39 @@
 
   /**
    * Test with basic request method from driver to client.
-   * @throws InjectionException on Exceptions on creating {@link ClientRPC}.
+   *
+   * @throws InjectionException   on Exceptions on creating {@link ClientRPC}.
    * @throws InterruptedException when interrupted while waiting EventHandler invocation
    */
   @Test
   public void testDriverToClientMethodInvocation() throws InjectionException, InterruptedException {
     final CountDownLatch latch = new CountDownLatch(1);
     driverRPCServer.registerHandler(ControlMessage.DriverToClientMessageType.DriverStarted,
-        msg -> latch.countDown());
+      msg -> latch.countDown());
     setupClientRPC();
     clientRPC.send(ControlMessage.DriverToClientMessage.newBuilder()
-        .setType(ControlMessage.DriverToClientMessageType.DriverStarted).build());
+      .setType(ControlMessage.DriverToClientMessageType.DriverStarted).build());
     latch.await();
   }
 
   /**
    * Test with request-response RPC between client and driver.
-   * @throws InjectionException on Exceptions on creating {@link ClientRPC}.
+   *
+   * @throws InjectionException   on Exceptions on creating {@link ClientRPC}.
    * @throws InterruptedException when interrupted while waiting EventHandler invocation
    */
   @Test
   public void testBetweenClientAndDriver() throws InjectionException, InterruptedException {
     final CountDownLatch latch = new CountDownLatch(1);
     driverRPCServer.registerHandler(ControlMessage.DriverToClientMessageType.DriverStarted,
-        msg -> driverRPCServer.send(ControlMessage.ClientToDriverMessage.newBuilder()
-            .setType(ControlMessage.ClientToDriverMessageType.LaunchDAG)
-            .setLaunchDAG(ControlMessage.LaunchDAGMessage.newBuilder().setDag("").build())
-            .build()));
+      msg -> driverRPCServer.send(ControlMessage.ClientToDriverMessage.newBuilder()
+        .setType(ControlMessage.ClientToDriverMessageType.LaunchDAG)
+        .setLaunchDAG(ControlMessage.LaunchDAGMessage.newBuilder().setDag("").build())
+        .build()));
     setupClientRPC();
     clientRPC.registerHandler(ControlMessage.ClientToDriverMessageType.LaunchDAG, msg -> latch.countDown());
     clientRPC.send(ControlMessage.DriverToClientMessage.newBuilder()
-        .setType(ControlMessage.DriverToClientMessageType.DriverStarted).build());
+      .setType(ControlMessage.DriverToClientMessageType.DriverStarted).build());
     latch.await();
   }
 }
diff --git a/client/src/test/java/org/apache/nemo/client/ClientEndpointTest.java b/client/src/test/java/org/apache/nemo/client/ClientEndpointTest.java
index 7095f39..2e4d1b9 100644
--- a/client/src/test/java/org/apache/nemo/client/ClientEndpointTest.java
+++ b/client/src/test/java/org/apache/nemo/client/ClientEndpointTest.java
@@ -19,11 +19,11 @@
 package org.apache.nemo.client;
 
 import org.apache.nemo.runtime.common.plan.PhysicalPlan;
+import org.apache.nemo.runtime.common.plan.TestPlanGenerator;
 import org.apache.nemo.runtime.common.state.PlanState;
 import org.apache.nemo.runtime.common.state.TaskState;
-import org.apache.nemo.runtime.master.metric.MetricMessageHandler;
 import org.apache.nemo.runtime.master.PlanStateManager;
-import org.apache.nemo.runtime.common.plan.TestPlanGenerator;
+import org.apache.nemo.runtime.master.metric.MetricMessageHandler;
 import org.apache.reef.tang.Injector;
 import org.apache.reef.tang.Tang;
 import org.junit.Test;
@@ -59,7 +59,7 @@
 
     // Create a PlanStateManager of a dag and create a DriverEndpoint with it.
     final PhysicalPlan physicalPlan =
-        TestPlanGenerator.generatePhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, false);
+      TestPlanGenerator.generatePhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, false);
     final Injector injector = Tang.Factory.getTang().newInjector();
     injector.bindVolatileInstance(MetricMessageHandler.class, mock(MetricMessageHandler.class));
     final PlanStateManager planStateManager = injector.getInstance(PlanStateManager.class);
@@ -75,8 +75,8 @@
 
     // Check finish.
     final List<String> tasks = physicalPlan.getStageDAG().getTopologicalSort().stream()
-        .flatMap(stage -> planStateManager.getTaskAttemptsToSchedule(stage.getId()).stream())
-        .collect(Collectors.toList());
+      .flatMap(stage -> planStateManager.getTaskAttemptsToSchedule(stage.getId()).stream())
+      .collect(Collectors.toList());
     tasks.forEach(taskId -> planStateManager.onTaskStateChanged(taskId, TaskState.State.EXECUTING));
     tasks.forEach(taskId -> planStateManager.onTaskStateChanged(taskId, TaskState.State.COMPLETE));
     assertEquals(PlanState.State.COMPLETE, clientEndpoint.waitUntilJobFinish());
diff --git a/common/pom.xml b/common/pom.xml
index e35b27f..a325148 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -17,41 +17,42 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-project</artifactId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../</relativePath>
-    </parent>
+  <parent>
+    <groupId>org.apache.nemo</groupId>
+    <artifactId>nemo-project</artifactId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../</relativePath>
+  </parent>
 
-    <artifactId>nemo-common</artifactId>
-    <name>Nemo Common</name>
+  <artifactId>nemo-common</artifactId>
+  <name>Nemo Common</name>
 
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.reef</groupId>
-            <artifactId>reef-io</artifactId>
-            <version>${reef.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.reef</groupId>
-            <artifactId>reef-common</artifactId>
-            <version>${reef.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-core</artifactId>
-            <version>${hadoop.version}</version>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.beam</groupId>
-          <artifactId>beam-sdks-java-core</artifactId>
-          <version>${beam.version}</version>
-        </dependency>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.reef</groupId>
+      <artifactId>reef-io</artifactId>
+      <version>${reef.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.reef</groupId>
+      <artifactId>reef-common</artifactId>
+      <version>${reef.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.beam</groupId>
+      <artifactId>beam-sdks-java-core</artifactId>
+      <version>${beam.version}</version>
+    </dependency>
 
-    </dependencies>
+  </dependencies>
 </project>
diff --git a/common/src/main/java/org/apache/nemo/common/DirectByteArrayOutputStream.java b/common/src/main/java/org/apache/nemo/common/DirectByteArrayOutputStream.java
index 84b9e3e..9f41e5d 100644
--- a/common/src/main/java/org/apache/nemo/common/DirectByteArrayOutputStream.java
+++ b/common/src/main/java/org/apache/nemo/common/DirectByteArrayOutputStream.java
@@ -36,6 +36,7 @@
 
   /**
    * Constructor specifying the size.
+   *
    * @param size the initial size.
    */
   public DirectByteArrayOutputStream(final int size) {
@@ -45,6 +46,7 @@
   /**
    * Note that serializedBytes include invalid bytes.
    * So we have to use it with the actualLength by using size() whenever needed.
+   *
    * @return the buffer where data is stored.
    */
   public byte[] getBufDirectly() {
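
The Javadoc caveat above deserves a concrete reading: `getBufDirectly()` hands out the backing array without trimming, so only the first `size()` bytes are meaningful. For example:

```java
final DirectByteArrayOutputStream outputStream = new DirectByteArrayOutputStream(64);
outputStream.write(new byte[]{1, 2, 3}, 0, 3);

final byte[] buffer = outputStream.getBufDirectly(); // backing array, capacity 64
final int validLength = outputStream.size();         // 3: only this prefix is real data

// Carry the valid length alongside the raw buffer whenever it is consumed:
final byte[] trimmed = java.util.Arrays.copyOf(buffer, validLength);
```
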
diff --git a/common/src/main/java/org/apache/nemo/common/HashRange.java b/common/src/main/java/org/apache/nemo/common/HashRange.java
index edcb39b..c3209a4 100644
--- a/common/src/main/java/org/apache/nemo/common/HashRange.java
+++ b/common/src/main/java/org/apache/nemo/common/HashRange.java
@@ -30,8 +30,9 @@
 
   /**
    * Private constructor.
+   *
    * @param rangeBeginInclusive point at which the hash range starts (inclusive).
-   * @param rangeEndExclusive point at which the hash range ends (exclusive).
+   * @param rangeEndExclusive   point at which the hash range ends (exclusive).
    */
   private HashRange(final int rangeBeginInclusive, final int rangeEndExclusive) {
     if (rangeBeginInclusive < 0 || rangeEndExclusive < 0) {
@@ -121,7 +122,7 @@
    */
   @Override
   public int hashCode() {
-    return Arrays.hashCode(new Object[] {
+    return Arrays.hashCode(new Object[]{
       rangeBeginInclusive,
       rangeEndExclusive,
     });
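
For context, `HashRange` models a half-open interval of hash values: `rangeBeginInclusive` is included, `rangeEndExclusive` is not, and negative bounds are rejected by the constructor above. Since the constructor is private and no factory appears in this diff, the `of(...)` call below is an assumption:

```java
// of(begin, end) is an assumed static factory mirroring the private constructor.
final HashRange firstHalf = HashRange.of(0, 4);  // hash values 0, 1, 2, 3
final HashRange secondHalf = HashRange.of(4, 8); // hash values 4, 5, 6, 7
```
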
diff --git a/common/src/main/java/org/apache/nemo/common/KeyRange.java b/common/src/main/java/org/apache/nemo/common/KeyRange.java
index 8a66050..1a6b764 100644
--- a/common/src/main/java/org/apache/nemo/common/KeyRange.java
+++ b/common/src/main/java/org/apache/nemo/common/KeyRange.java
@@ -22,6 +22,7 @@
 
 /**
  * Represents the key range of data partitions within a block.
+ *
  * @param <K> the type of key to assign for each partition.
  */
 public interface KeyRange<K extends Serializable> extends Serializable {
diff --git a/common/src/main/java/org/apache/nemo/common/Pair.java b/common/src/main/java/org/apache/nemo/common/Pair.java
index 7ddc7d4..d14203f 100644
--- a/common/src/main/java/org/apache/nemo/common/Pair.java
+++ b/common/src/main/java/org/apache/nemo/common/Pair.java
@@ -23,6 +23,7 @@
 
 /**
  * Pair class.
+ *
  * @param <A> type of the left element.
  * @param <B> type of the right element.
  */
@@ -32,7 +33,8 @@
 
   /**
    * Private constructor for Pair class.
-   * @param left left element.
+   *
+   * @param left  left element.
    * @param right right element.
    */
   private Pair(final A left, final B right) {
@@ -46,6 +48,7 @@
   public A left() {
     return left;
   }
+
   /**
    * @return right element
    */
@@ -61,23 +64,24 @@
   @Override
   public boolean equals(final Object obj) {
     return obj instanceof Pair
-        && Objects.equals(this.left, ((Pair) obj).left)
-        && Objects.equals(this.right, ((Pair) obj).right);
+      && Objects.equals(this.left, ((Pair) obj).left)
+      && Objects.equals(this.right, ((Pair) obj).right);
   }
 
   @Override
   public int hashCode() {
     return this.left == null
-        ? (this.right == null ? 0 : this.right.hashCode() + 1)
-        : (this.right == null ? this.left.hashCode() + 2 : this.left.hashCode() * 17 + this.right.hashCode());
+      ? (this.right == null ? 0 : this.right.hashCode() + 1)
+      : (this.right == null ? this.left.hashCode() + 2 : this.left.hashCode() * 17 + this.right.hashCode());
   }
 
   /**
    * Static initializer of the Pair class.
-   * @param left left element.
+   *
+   * @param left  left element.
    * @param right right element.
-   * @param <A> Type of the left element.
-   * @param <B> Type of the right element.
+   * @param <A>   Type of the left element.
+   * @param <B>   Type of the right element.
    * @return the newly created Pair.
    */
   public static <A, B> Pair<A, B> of(final A left, final B right) {
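
In use, the `Pair` API above reads as follows; because `equals` and `hashCode` cover both elements, pairs behave predictably as map keys and in assertions:

```java
final Pair<String, Integer> wordCount = Pair.of("apples", 3);

final String word = wordCount.left();  // "apples"
final int count = wordCount.right();   // 3

// Value semantics: both elements participate in equals/hashCode.
assert wordCount.equals(Pair.of("apples", 3));
```
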
diff --git a/common/src/main/java/org/apache/nemo/common/StateMachine.java b/common/src/main/java/org/apache/nemo/common/StateMachine.java
index 28740a9..bc76b61 100644
--- a/common/src/main/java/org/apache/nemo/common/StateMachine.java
+++ b/common/src/main/java/org/apache/nemo/common/StateMachine.java
@@ -36,7 +36,8 @@
 
   /**
    * Private constructor.
-   * @param stateMap Map of state enum to the state.
+   *
+   * @param stateMap     Map of state enum to the state.
    * @param initialState initial state of the state machine.
    */
   private StateMachine(final Map<Enum, State> stateMap, final Enum initialState) {
@@ -62,7 +63,7 @@
    *
    * @param state a state
    * @throws IllegalStateTransitionException the state is unknown state, or the transition
-   * from the current state to the specified state is illegal
+   *                                         from the current state to the specified state is illegal
    */
   public synchronized void setState(final Enum state) throws IllegalStateTransitionException {
     if (!stateMap.containsKey(state)) {
@@ -83,11 +84,11 @@
    * if the current state equals to the expected state.
    *
    * @param expectedCurrentState an expected state
-   * @param state a state
+   * @param state                a state
    * @return {@code true} if successful. {@code false} indicates that
    * the actual value was not equal to the expected value.
    * @throws IllegalStateTransitionException if the state is unknown state, or the transition
-   * from the current state to the specified state is illegal
+   *                                         from the current state to the specified state is illegal
    */
   public synchronized boolean compareAndSetState(final Enum expectedCurrentState,
                                                  final Enum state) throws IllegalStateTransitionException {
@@ -96,7 +97,7 @@
       setState(state);
     } else {
       LOG.debug("The expected current state [" + expectedCurrentState
-          + "] is different from the actual state [" + currentState.stateEnum + "]");
+        + "] is different from the actual state [" + currentState.stateEnum + "]");
     }
 
     return compared;
@@ -114,7 +115,7 @@
    */
   private String getPossibleTransitionsFromCurrentState() {
     final StringBuilder stringBuilder = new StringBuilder()
-        .append("Possible transitions from the current state are").append('\n');
+      .append("Possible transitions from the current state are").append('\n');
 
     for (final Transition transition : currentState.getAllTransitions()) {
       stringBuilder.append(transition).append('\n');
@@ -128,7 +129,7 @@
     final StringBuilder stringBuilder = new StringBuilder();
     for (final State state : stateMap.values()) {
       stringBuilder.append(state).append('\n')
-          .append("Possible transitions:").append('\n');
+        .append("Possible transitions:").append('\n');
       for (final Transition transition : state.getAllTransitions()) {
         stringBuilder.append(transition).append('\n');
       }
@@ -154,7 +155,8 @@
 
     /**
      * Private constructor.
-     * @param stateEnum Enum of the state.
+     *
+     * @param stateEnum   Enum of the state.
      * @param description Description of the state.
      */
     private State(final Enum stateEnum, final String description) {
@@ -165,6 +167,7 @@
 
     /**
      * Adds a transition to the state.
+     *
      * @param transition the added transition.
      */
     private void addTransition(final Transition transition) {
@@ -206,8 +209,9 @@
 
     /**
      * Private constructor.
-     * @param from State from which we switch.
-     * @param to State that we switch to.
+     *
+     * @param from        State from which we switch.
+     * @param to          State that we switch to.
      * @param description Description of the transition.
      */
     private Transition(final State from, final State to, final String description) {
@@ -244,7 +248,7 @@
     /**
      * Adds a state with name and description.
      *
-     * @param stateEnum enumeration indicating the state
+     * @param stateEnum   enumeration indicating the state
      * @param description description of the state
      * @return the builder
      * @throws RuntimeException if the state was already added
@@ -275,12 +279,12 @@
     /**
      * Adds a transition with description.
      *
-     * @param from from state name
-     * @param to to state name
+     * @param from        from state name
+     * @param to          to state name
      * @param description description of the transition
      * @return the builder
      * @throws RuntimeException if either from or to state was not added, or the same transition
-     * was already added
+     *                          was already added
      */
     public Builder addTransition(final Enum from, final Enum to, final String description) {
       if (!stateEnumSet.contains(from)) {
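
Putting the `StateMachine` pieces together: states and transitions are declared on the `Builder`, and `setState`/`compareAndSetState` then enforce exactly those transitions, throwing the checked `IllegalStateTransitionException` otherwise. A sketch under stated assumptions; how the builder is obtained and finalized is not visible in this diff, so `newBuilder()`, `setInitialState(...)`, and `build()` are assumptions:

```java
enum JobState { READY, EXECUTING, COMPLETE }

// newBuilder(), setInitialState(...) and build() are assumed entry points.
final StateMachine stateMachine = StateMachine.newBuilder()
  .addState(JobState.READY, "Accepted, not yet scheduled")
  .addState(JobState.EXECUTING, "Tasks are running")
  .addState(JobState.COMPLETE, "All tasks finished")
  .addTransition(JobState.READY, JobState.EXECUTING, "Scheduler dispatched the job")
  .addTransition(JobState.EXECUTING, JobState.COMPLETE, "Last task completed")
  .setInitialState(JobState.READY)
  .build();

stateMachine.setState(JobState.EXECUTING); // illegal moves throw IllegalStateTransitionException
stateMachine.compareAndSetState(JobState.EXECUTING, JobState.COMPLETE); // CAS-style transition
```
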
diff --git a/common/src/main/java/org/apache/nemo/common/Util.java b/common/src/main/java/org/apache/nemo/common/Util.java
index 4d9e042..1156704 100644
--- a/common/src/main/java/org/apache/nemo/common/Util.java
+++ b/common/src/main/java/org/apache/nemo/common/Util.java
@@ -55,6 +55,7 @@
 
   /**
    * Finds the project root path.
+   *
    * @return the project root path.
    */
   public static String fetchProjectRootPath() {
@@ -63,6 +64,7 @@
 
   /**
    * Helper method to recursively find the LICENSE file.
+   *
    * @param path the path to search for.
    * @return the path containing the LICENSE file.
    */
@@ -102,8 +104,8 @@
 
   /**
    * @param edgeToClone to copy execution properties from.
-   * @param newSrc of the new edge.
-   * @param newDst of the new edge.
+   * @param newSrc      of the new edge.
+   * @param newDst      of the new edge.
    * @return the new edge.
    */
   public static IREdge cloneEdge(final IREdge edgeToClone,
@@ -119,8 +121,8 @@
    *
    * @param commPattern to use.
    * @param edgeToClone to copy execution properties from.
-   * @param newSrc of the new edge.
-   * @param newDst of the new edge.
+   * @param newSrc      of the new edge.
+   * @param newDst      of the new edge.
    * @return the new edge.
    */
   public static IREdge cloneEdge(final CommunicationPatternProperty.Value commPattern,
@@ -204,6 +206,7 @@
 
   /**
    * Method for the instrumentation: for getting the object size.
+   *
    * @param args arguments.
    * @param inst the instrumentation.
    */
@@ -213,6 +216,7 @@
 
   /**
    * Get the object byte size.
+   *
    * @param o object to measure.
    * @return the bytes of the object.
    */
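
Among the `Util` helpers touched above, `cloneEdge` is the workhorse for compiler passes: it creates a new `IREdge` between different endpoints while copying the original edge's execution properties. Illustrative calls; the vertices are hypothetical placeholders and the enum constant is shown only as an example value:

```java
// existingEdge, newSrcVertex and newDstVertex are hypothetical placeholders.
final IREdge rewired = Util.cloneEdge(existingEdge, newSrcVertex, newDstVertex);

// The overload additionally overrides the communication pattern of the clone
// (constant name shown for illustration only).
final IREdge reshaped = Util.cloneEdge(
  CommunicationPatternProperty.Value.Shuffle, existingEdge, newSrcVertex, newDstVertex);
```
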
diff --git a/common/src/main/java/org/apache/nemo/common/coder/BytesDecoderFactory.java b/common/src/main/java/org/apache/nemo/common/coder/BytesDecoderFactory.java
index 45c47b3..594d412 100644
--- a/common/src/main/java/org/apache/nemo/common/coder/BytesDecoderFactory.java
+++ b/common/src/main/java/org/apache/nemo/common/coder/BytesDecoderFactory.java
@@ -43,6 +43,7 @@
 
   /**
    * Static initializer of the decoder.
+   *
    * @return the initializer.
    */
   public static BytesDecoderFactory of() {
@@ -70,7 +71,7 @@
     /**
      * Constructor.
      *
-     * @param inputStream  the input stream to decode.
+     * @param inputStream the input stream to decode.
      */
     private BytesDecoder(final InputStream inputStream) {
       this.inputStream = inputStream;
diff --git a/common/src/main/java/org/apache/nemo/common/coder/BytesEncoderFactory.java b/common/src/main/java/org/apache/nemo/common/coder/BytesEncoderFactory.java
index 6666a7b..afe1623 100644
--- a/common/src/main/java/org/apache/nemo/common/coder/BytesEncoderFactory.java
+++ b/common/src/main/java/org/apache/nemo/common/coder/BytesEncoderFactory.java
@@ -21,7 +21,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.*;
+import java.io.IOException;
+import java.io.OutputStream;
 
 /**
  * A {@link EncoderFactory} which is used for an array of bytes.
@@ -40,6 +41,7 @@
 
   /**
    * Static initializer of the encoder.
+   *
    * @return the initializer.
    */
   public static BytesEncoderFactory of() {
diff --git a/common/src/main/java/org/apache/nemo/common/coder/IntDecoderFactory.java b/common/src/main/java/org/apache/nemo/common/coder/IntDecoderFactory.java
index 41653f2..146965b 100644
--- a/common/src/main/java/org/apache/nemo/common/coder/IntDecoderFactory.java
+++ b/common/src/main/java/org/apache/nemo/common/coder/IntDecoderFactory.java
@@ -18,7 +18,9 @@
  */
 package org.apache.nemo.common.coder;
 
-import java.io.*;
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.io.InputStream;
 
 /**
  * A {@link DecoderFactory} which is used for an integer.
@@ -36,6 +38,7 @@
 
   /**
    * Static initializer of the coder.
+   *
    * @return the initializer.
    */
   public static IntDecoderFactory of() {
@@ -62,7 +65,7 @@
     /**
      * Constructor.
      *
-     * @param inputStream  the input stream to decode.
+     * @param inputStream the input stream to decode.
      */
     private IntDecoder(final InputStream inputStream) {
       // If the inputStream is closed well in upper level, it is okay to not close this stream
diff --git a/common/src/main/java/org/apache/nemo/common/coder/IntEncoderFactory.java b/common/src/main/java/org/apache/nemo/common/coder/IntEncoderFactory.java
index 6be865e..d47ee50 100644
--- a/common/src/main/java/org/apache/nemo/common/coder/IntEncoderFactory.java
+++ b/common/src/main/java/org/apache/nemo/common/coder/IntEncoderFactory.java
@@ -18,7 +18,9 @@
  */
 package org.apache.nemo.common.coder;
 
-import java.io.*;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
 
 /**
  * A {@link EncoderFactory} which is used for an integer.
@@ -36,6 +38,7 @@
 
   /**
    * Static initializer of the coder.
+   *
    * @return the initializer.
    */
   public static IntEncoderFactory of() {
diff --git a/common/src/main/java/org/apache/nemo/common/coder/LongDecoderFactory.java b/common/src/main/java/org/apache/nemo/common/coder/LongDecoderFactory.java
index 622ada2..0a0804c 100644
--- a/common/src/main/java/org/apache/nemo/common/coder/LongDecoderFactory.java
+++ b/common/src/main/java/org/apache/nemo/common/coder/LongDecoderFactory.java
@@ -38,6 +38,7 @@
 
   /**
    * Static initializer of the coder.
+   *
    * @return the initializer.
    */
   public static LongDecoderFactory of() {
@@ -63,7 +64,7 @@
     /**
      * Constructor.
      *
-     * @param inputStream  the input stream to decode.
+     * @param inputStream the input stream to decode.
      */
     private LongDecoder(final InputStream inputStream) {
       // If the inputStream is closed well in upper level, it is okay to not close this stream
diff --git a/common/src/main/java/org/apache/nemo/common/coder/LongEncoderFactory.java b/common/src/main/java/org/apache/nemo/common/coder/LongEncoderFactory.java
index d7c6f08..b4a3086 100644
--- a/common/src/main/java/org/apache/nemo/common/coder/LongEncoderFactory.java
+++ b/common/src/main/java/org/apache/nemo/common/coder/LongEncoderFactory.java
@@ -38,6 +38,7 @@
 
   /**
    * Static initializer of the coder.
+   *
    * @return the initializer.
    */
   public static LongEncoderFactory of() {
diff --git a/common/src/main/java/org/apache/nemo/common/coder/PairDecoderFactory.java b/common/src/main/java/org/apache/nemo/common/coder/PairDecoderFactory.java
index 790690d..1e36310 100644
--- a/common/src/main/java/org/apache/nemo/common/coder/PairDecoderFactory.java
+++ b/common/src/main/java/org/apache/nemo/common/coder/PairDecoderFactory.java
@@ -25,6 +25,7 @@
 
 /**
  * An DecoderFactory for {@link Pair}. Reference: KvCoder in BEAM.
+ *
  * @param <A> type for the left coder.
  * @param <B> type for the right coder.
  */
@@ -49,8 +50,8 @@
    *
    * @param leftDecoderFactory  left coder.
    * @param rightDecoderFactory right coder.
-   * @param <A>          type of the left element.
-   * @param <B>          type of the right element.
+   * @param <A>                 type of the left element.
+   * @param <B>                 type of the right element.
    * @return the new PairDecoderFactory.
    */
   public static <A, B> PairDecoderFactory<A, B> of(final DecoderFactory<A> leftDecoderFactory,
@@ -76,6 +77,7 @@
 
   /**
    * PairDecoder.
+   *
    * @param <T1> type for the left coder.
    * @param <T2> type for the right coder.
    */
@@ -87,7 +89,7 @@
     /**
      * Constructor.
      *
-     * @param inputStream  the input stream to decode.
+     * @param inputStream         the input stream to decode.
      * @param leftDecoderFactory  the actual decoder to use for left elements.
      * @param rightDecoderFactory the actual decoder to use for right elements.
      * @throws IOException if fail to instantiate coders.
diff --git a/common/src/main/java/org/apache/nemo/common/coder/PairEncoderFactory.java b/common/src/main/java/org/apache/nemo/common/coder/PairEncoderFactory.java
index 030c336..b2db262 100644
--- a/common/src/main/java/org/apache/nemo/common/coder/PairEncoderFactory.java
+++ b/common/src/main/java/org/apache/nemo/common/coder/PairEncoderFactory.java
@@ -19,11 +19,13 @@
 package org.apache.nemo.common.coder;
 
 import org.apache.nemo.common.Pair;
+
 import java.io.IOException;
 import java.io.OutputStream;
 
 /**
  * An EncoderFactory for {@link Pair}. Reference: KvCoder in BEAM.
+ *
  * @param <A> type for the left coder.
  * @param <B> type for the right coder.
  */
@@ -48,8 +50,8 @@
    *
    * @param leftEncoderFactory  left coder.
    * @param rightEncoderFactory right coder.
-   * @param <A>          type of the left element.
-   * @param <B>          type of the right element.
+   * @param <A>                 type of the left element.
+   * @param <B>                 type of the right element.
    * @return the new PairEncoderFactory.
    */
   public static <A, B> PairEncoderFactory<A, B> of(final EncoderFactory<A> leftEncoderFactory,
@@ -75,6 +77,7 @@
 
   /**
    * PairEncoder.
+   *
    * @param <T1> type for the left coder.
    * @param <T2> type for the right coder.
    */
@@ -86,7 +89,7 @@
     /**
      * Constructor.
      *
-     * @param outputStream the output stream to store the encoded bytes.
+     * @param outputStream        the output stream to store the encoded bytes.
      * @param leftEncoderFactory  the actual encoder to use for left elements.
      * @param rightEncoderFactory the actual encoder to use for right elements.
      * @throws IOException if fail to instantiate coders.
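
A note on how the coder factories in these files compose: pair coders are built from the factories of their two element types, and the decoder mirrors the encoder. In the sketch below, the `create(...)`, `encode(...)`, and `decode()` calls follow the factory pattern implied by the private constructors above but are not shown in this diff, so treat them as assumptions (all of them may throw `IOException`):

```java
final PairEncoderFactory<Integer, Long> pairEncoderFactory =
  PairEncoderFactory.of(IntEncoderFactory.of(), LongEncoderFactory.of());
final PairDecoderFactory<Integer, Long> pairDecoderFactory =
  PairDecoderFactory.of(IntDecoderFactory.of(), LongDecoderFactory.of());

final java.io.ByteArrayOutputStream bytes = new java.io.ByteArrayOutputStream();
pairEncoderFactory.create(bytes).encode(Pair.of(42, 7L));         // assumed create/encode

final Pair<Integer, Long> decoded = pairDecoderFactory
  .create(new java.io.ByteArrayInputStream(bytes.toByteArray()))  // assumed create
  .decode();                                                      // assumed decode
```
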
diff --git a/common/src/main/java/org/apache/nemo/common/dag/DAG.java b/common/src/main/java/org/apache/nemo/common/dag/DAG.java
index de11d98..bbe6042 100644
--- a/common/src/main/java/org/apache/nemo/common/dag/DAG.java
+++ b/common/src/main/java/org/apache/nemo/common/dag/DAG.java
@@ -37,6 +37,7 @@
 
 /**
  * DAG implementation.
+ *
  * @param <V> the vertex type
  * @param <E> the edge type
  */
@@ -52,11 +53,12 @@
 
   /**
    * Constructor of DAG, called by the DAGBuilder.
-   * @param vertices set of vertices.
-   * @param incomingEdges map of incoming edges for each vertex.
-   * @param outgoingEdges map of outgoing edges for each vertex.
+   *
+   * @param vertices              set of vertices.
+   * @param incomingEdges         map of incoming edges for each vertex.
+   * @param outgoingEdges         map of outgoing edges for each vertex.
    * @param assignedLoopVertexMap map of assignedLoopVertex info.
-   * @param loopStackDepthMap map of stack depth of LoopVertices.
+   * @param loopStackDepthMap     map of stack depth of LoopVertices.
    */
   public DAG(final Set<V> vertices,
              final Map<V, Set<E>> incomingEdges,
@@ -68,9 +70,9 @@
     this.outgoingEdges = new HashMap<>();
     vertices.stream().sorted(Comparator.comparingInt(Vertex::getNumericId)).forEachOrdered(this.vertices::add);
     incomingEdges.forEach((v, es) -> this.incomingEdges.put(v.getId(),
-        es.stream().sorted(Comparator.comparingInt(Edge::getNumericId)).collect(Collectors.toList())));
+      es.stream().sorted(Comparator.comparingInt(Edge::getNumericId)).collect(Collectors.toList())));
     outgoingEdges.forEach((v, es) -> this.outgoingEdges.put(v.getId(),
-        es.stream().sorted(Comparator.comparingInt(Edge::getNumericId)).collect(Collectors.toList())));
+      es.stream().sorted(Comparator.comparingInt(Edge::getNumericId)).collect(Collectors.toList())));
 
     this.rootVertices = new ArrayList<>();
     vertices.forEach(v -> {
@@ -150,7 +152,7 @@
       }
     }
     throw new IllegalEdgeOperationException(
-        new Throwable("There exists no edge from " + srcVertexId + " to " + dstVertexId));
+      new Throwable("There exists no edge from " + srcVertexId + " to " + dstVertexId));
   }
 
   @Override
@@ -169,8 +171,9 @@
 
   /**
    * Recursively adds ancestors of a vertex to the given list.
+   *
    * @param ancestorList to accumulate the ancestors.
-   * @param vertexId to find the ancestors for.
+   * @param vertexId     to find the ancestors for.
    */
   private void addAncestors(final List<V> ancestorList, final String vertexId) {
     getParents(vertexId).forEach(parent -> {
@@ -208,15 +211,15 @@
   public void dfsTraverse(final Consumer<V> function, final TraversalOrder traversalOrder) {
     final Set<V> visited = new HashSet<>();
     getVertices().stream().filter(vertex -> incomingEdges.get(vertex.getId()).isEmpty()) // root Operators
-        .filter(vertex -> !visited.contains(vertex))
-        .forEachOrdered(vertex -> dfsDo(vertex, function, traversalOrder, visited));
+      .filter(vertex -> !visited.contains(vertex))
+      .forEachOrdered(vertex -> dfsDo(vertex, function, traversalOrder, visited));
   }
 
   @Override
   public void dfsDo(final V vertex,
-                     final Consumer<V> vertexConsumer,
-                     final TraversalOrder traversalOrder,
-                     final Set<V> visited) {
+                    final Consumer<V> vertexConsumer,
+                    final TraversalOrder traversalOrder,
+                    final Set<V> visited) {
     visited.add(vertex);
     if (traversalOrder == TraversalOrder.PreOrder) {
       vertexConsumer.accept(vertex);
@@ -224,8 +227,8 @@
     final List<E> outEdges = getOutgoingEdgesOf(vertex);
     if (!outEdges.isEmpty()) {
       outEdges.stream().map(Edge::getDst)
-          .filter(outOperator -> !visited.contains(outOperator))
-          .forEachOrdered(outOperator -> dfsDo(outOperator, vertexConsumer, traversalOrder, visited));
+        .filter(outOperator -> !visited.contains(outOperator))
+        .forEachOrdered(outOperator -> dfsDo(outOperator, vertexConsumer, traversalOrder, visited));
     }
     if (traversalOrder == TraversalOrder.PostOrder) {
       vertexConsumer.accept(vertex);
@@ -237,8 +240,10 @@
     final Set<V> reachableFromV1 = new HashSet<>();
     final Set<V> reachableFromV2 = new HashSet<>();
 
-    this.dfsDo(v1, (v) -> { }, TraversalOrder.PostOrder, reachableFromV1);
-    this.dfsDo(v2, (v) -> { }, TraversalOrder.PostOrder, reachableFromV2);
+    this.dfsDo(v1, (v) -> {
+    }, TraversalOrder.PostOrder, reachableFromV1);
+    this.dfsDo(v2, (v) -> {
+    }, TraversalOrder.PostOrder, reachableFromV2);
 
     return reachableFromV1.contains(v2) || reachableFromV2.contains(v1);
   }
@@ -306,10 +311,10 @@
       printWriter.println(toString());
       printWriter.close();
       LOG.debug(String.format("DAG JSON for %s is saved at %s"
-          + " (Use https://service.jangho.kr/nemo-dag/ to visualize it.)", description, file.getPath()));
+        + " (Use https://service.jangho.kr/nemo-dag/ to visualize it.)", description, file.getPath()));
     } catch (IOException e) {
       LOG.warn(String.format("Cannot store JSON representation of %s to %s: %s",
-          description, file.getPath(), e.toString()));
+        description, file.getPath(), e.toString()));
     }
   }
 }
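
For orientation, the traversal APIs reformatted above are the primary way consumers walk a built DAG, and `storeJSON` produces the input for the visualizer named in the log message. A usage sketch on an assumed, already-built `DAG<IRVertex, IREdge> dag`:

```java
// Deterministic topological visit (parents before children).
dag.topologicalDo(vertex -> System.out.println("visit " + vertex.getId()));

// Depth-first traversal; PreOrder applies the consumer before descending.
dag.dfsTraverse(vertex -> System.out.println("dfs " + vertex.getId()),
  TraversalOrder.PreOrder); // TraversalOrder as referenced in the code above

// Dump a JSON snapshot for https://service.jangho.kr/nemo-dag/.
dag.storeJSON("debug", "my-dag", "DAG after optimization");
```
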
diff --git a/common/src/main/java/org/apache/nemo/common/dag/DAGBuilder.java b/common/src/main/java/org/apache/nemo/common/dag/DAGBuilder.java
index bd5cc95..1d6af69 100644
--- a/common/src/main/java/org/apache/nemo/common/dag/DAGBuilder.java
+++ b/common/src/main/java/org/apache/nemo/common/dag/DAGBuilder.java
@@ -19,8 +19,11 @@
 package org.apache.nemo.common.dag;
 
 import org.apache.nemo.common.exception.CompileTimeOptimizationException;
-import org.apache.nemo.common.ir.vertex.*;
 import org.apache.nemo.common.exception.IllegalVertexOperationException;
+import org.apache.nemo.common.ir.vertex.IRVertex;
+import org.apache.nemo.common.ir.vertex.LoopVertex;
+import org.apache.nemo.common.ir.vertex.OperatorVertex;
+import org.apache.nemo.common.ir.vertex.SourceVertex;
 import org.apache.nemo.common.ir.vertex.executionproperty.MessageIdVertexProperty;
 import org.apache.nemo.common.ir.vertex.utility.MessageAggregatorVertex;
 import org.apache.nemo.common.ir.vertex.utility.SamplingVertex;
@@ -34,6 +37,7 @@
 
 /**
  * DAG Builder.
+ *
  * @param <V> the vertex type.
  * @param <E> the edge type.
  */
@@ -157,10 +161,10 @@
     } else {
       this.buildWithoutSourceSinkCheck().storeJSON("debug", "errored_ir", "Errored IR");
       throw new IllegalVertexOperationException("The DAG does not contain"
-          + (vertices.contains(src) ? "" : " [source]") + (vertices.contains(dst) ? "" : " [destination]")
-          + " of the edge: [" + (src == null ? null : src.getId())
-          + "]->[" + (dst == null ? null : dst.getId()) + "] in "
-          + vertices.stream().map(V::getId).collect(Collectors.toSet()));
+        + (vertices.contains(src) ? "" : " [source]") + (vertices.contains(dst) ? "" : " [destination]")
+        + " of the edge: [" + (src == null ? null : src.getId())
+        + "]->[" + (dst == null ? null : dst.getId()) + "] in "
+        + vertices.stream().map(V::getId).collect(Collectors.toSet()));
     }
     return this;
   }
@@ -212,8 +216,8 @@
       throw getException("DAG contains a cycle", vertex.toString());
     } else {
       outgoingEdges.get(vertex).stream().map(Edge::getDst)
-          .filter(v -> !visited.contains(v))
-          .forEachOrdered(v -> cycleCheck(stack, visited, v));
+        .filter(v -> !visited.contains(v))
+        .forEachOrdered(v -> cycleCheck(stack, visited, v));
     }
     stack.pop();
   }
@@ -224,14 +228,14 @@
   private void sourceCheck() {
     // We observe IRVertex that do not have any incoming edges.
     final Supplier<Stream<V>> verticesToObserve = () -> vertices.stream().filter(v -> incomingEdges.get(v).isEmpty())
-        .filter(v -> v instanceof IRVertex);
+      .filter(v -> v instanceof IRVertex);
     // They should all match SourceVertex
     if (!(verticesToObserve.get().allMatch(v -> (v instanceof SourceVertex)
       || (v instanceof SamplingVertex && ((SamplingVertex) v).getCloneOfOriginalVertex() instanceof SourceVertex)))) {
       final String problematicVertices = verticesToObserve.get()
-          .filter(v -> !(v instanceof SourceVertex))
-          .map(V::getId)
-          .collect(Collectors.toList()).toString();
+        .filter(v -> !(v instanceof SourceVertex))
+        .map(V::getId)
+        .collect(Collectors.toList()).toString();
       throw getException("DAG source check failed while building DAG", problematicVertices);
     }
   }
@@ -242,14 +246,14 @@
   private void sinkCheck() {
     // We observe IRVertex that do not have any outgoing edges.
     final Supplier<Stream<V>> verticesToObserve = () -> vertices.stream()
-        .filter(v -> outgoingEdges.get(v).isEmpty())
-        .filter(v -> v instanceof IRVertex);
+      .filter(v -> outgoingEdges.get(v).isEmpty())
+      .filter(v -> v instanceof IRVertex);
     // They should either be OperatorVertex or LoopVertex
     if (verticesToObserve.get().anyMatch(v ->
       !(v instanceof OperatorVertex || v instanceof LoopVertex))) {
       final String problematicVertices = verticesToObserve.get().filter(v ->
-          !(v instanceof OperatorVertex || v instanceof LoopVertex))
-          .map(V::getId).collect(Collectors.toList()).toString();
+        !(v instanceof OperatorVertex || v instanceof LoopVertex))
+        .map(V::getId).collect(Collectors.toList()).toString();
       throw getException("DAG sink check failed while building DAG", problematicVertices);
     }
   }
@@ -285,7 +289,7 @@
       final Stack<V> stack = new Stack<>();
       final Set<V> visited = new HashSet<>();
       vertices.stream().filter(v -> incomingEdges.get(v).isEmpty()) // source operators
-          .forEachOrdered(v -> cycleCheck(stack, visited, v));
+        .forEachOrdered(v -> cycleCheck(stack, visited, v));
     }
     if (source) {
       sourceCheck();
@@ -332,7 +336,8 @@
 
   /**
    * Generates a user-friendly exception message.
-   * @param reason of the exception.
+   *
+   * @param reason             of the exception.
    * @param problematicObjects that caused the exception.
    * @return exception object.
    */
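
The checks above (`cycleCheck`, `sourceCheck`, `sinkCheck`) run when the builder finalizes a DAG, so malformed graphs fail fast with the message assembled by `getException`. A construction sketch; `addVertex`, `connectVertices`, and `build()` are assumed mutators implied by the error paths above (the "DAG does not contain [source]/[destination]" message) rather than shown in this diff:

```java
final DAGBuilder<IRVertex, IREdge> builder = new DAGBuilder<>(); // assumed no-arg constructor

builder.addVertex(source); // assumed; `source` and `sink` are hypothetical vertices
builder.addVertex(sink);
builder.connectVertices(edge); // assumed; `edge` is a hypothetical IREdge from source to sink

// Finalizing runs the cycle/source/sink integrity checks seen above;
// buildWithoutSourceSinkCheck() skips the endpoint checks, as in the error path.
final DAG<IRVertex, IREdge> dag = builder.build(); // assumed finalizer
```
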
diff --git a/common/src/main/java/org/apache/nemo/common/dag/DAGInterface.java b/common/src/main/java/org/apache/nemo/common/dag/DAGInterface.java
index eba2e4a..7040a9c 100644
--- a/common/src/main/java/org/apache/nemo/common/dag/DAGInterface.java
+++ b/common/src/main/java/org/apache/nemo/common/dag/DAGInterface.java
@@ -23,7 +23,8 @@
 import org.apache.nemo.common.ir.vertex.LoopVertex;
 
 import java.io.Serializable;
-import java.util.*;
+import java.util.List;
+import java.util.Set;
 import java.util.function.Consumer;
 import java.util.function.Predicate;
 
@@ -37,6 +38,7 @@
 public interface DAGInterface<V extends Vertex, E extends Edge<V>> extends Serializable {
   /**
    * Retrieves the vertex given its ID.
+   *
    * @param id of the vertex to retrieve
    * @return the vertex
    */
@@ -44,6 +46,7 @@
 
   /**
    * Retrieves the vertices of this DAG.
+   *
    * @return the list of vertices.
    * Note that the result is never null, ensured by {@link DAGBuilder}.
    */
@@ -51,18 +54,21 @@
 
   /**
    * Retrieves the edges of this DAG.
+   *
    * @return the list of edges.
    */
   List<E> getEdges();
 
   /**
    * Retrieves the root vertices of this DAG.
+   *
    * @return the list of root vertices.
    */
   List<V> getRootVertices();
 
   /**
    * Retrieves the incoming edges of the given vertex.
+   *
    * @param v the subject vertex.
    * @return the list of incoming edges to the vertex.
    * Note that the result is never null, ensured by {@link DAGBuilder}.
@@ -71,6 +77,7 @@
 
   /**
    * Retrieves the incoming edges of the given vertex.
+   *
    * @param vertexId the ID of the subject vertex.
    * @return the list of incoming edges to the vertex.
    * Note that the result is never null, ensured by {@link DAGBuilder}.
@@ -79,6 +86,7 @@
 
   /**
    * Retrieves the outgoing edges of the given vertex.
+   *
    * @param v the subject vertex.
    * @return the list of outgoing edges to the vertex.
    * Note that the result is never null, ensured by {@link DAGBuilder}.
@@ -87,6 +95,7 @@
 
   /**
    * Retrieves the outgoing edges of the given vertex.
+   *
    * @param vertexId the ID of the subject vertex.
    * @return the list of outgoing edges to the vertex.
    * Note that the result is never null, ensured by {@link DAGBuilder}.
@@ -95,6 +104,7 @@
 
   /**
    * Retrieves the parent vertices of the given vertex.
+   *
    * @param vertexId the ID of the subject vertex.
    * @return the list of parent vertices.
    */
@@ -102,6 +112,7 @@
 
   /**
    * Retrieves the children vertices of the given vertex.
+   *
    * @param vertexId the ID of the subject vertex.
    * @return the list of children vertices.
    */
@@ -109,6 +120,7 @@
 
   /**
    * Retrieves the edge between two vertices.
+   *
    * @param srcVertexId the ID of the source vertex.
    * @param dstVertexId the ID of the destination vertex.
    * @return the edge if exists.
@@ -119,12 +131,14 @@
   /**
    * Gets the DAG's vertices in topologically sorted order.
    * This function brings consistent results.
+   *
    * @return the sorted list of vertices in topological order.
    */
   List<V> getTopologicalSort();
 
   /**
    * Retrieves the ancestors of a vertex.
+   *
    * @param vertexId to find the ancestors for.
    * @return the list of ancestors.
    */
@@ -132,6 +146,7 @@
 
   /**
    * Retrieves the descendants of a vertex.
+   *
    * @param vertexId to find the descendants for.
    * @return the list of descendants.
    */
@@ -139,6 +154,7 @@
 
   /**
    * Filters the vertices according to the given condition.
+   *
    * @param condition that must be satisfied to be included in the filtered list.
    * @return the list of vertices that meet the condition.
    */
@@ -147,6 +163,7 @@
   /**
    * Applies the function to each node in the DAG in a topological order.
    * This function brings consistent results.
+   *
    * @param function to apply.
    */
   void topologicalDo(final Consumer<V> function);
@@ -161,17 +178,19 @@
 
   /**
    * Traverses the DAG by DFS, applying the given function.
-   * @param function to apply.
+   *
+   * @param function       to apply.
    * @param traversalOrder which the DFS should be conducted.
    */
   void dfsTraverse(final Consumer<V> function, final TraversalOrder traversalOrder);
 
   /**
    * A recursive helper function for {@link #dfsTraverse(Consumer, TraversalOrder)}.
-   * @param vertex the root vertex of the remaining DAG.
+   *
+   * @param vertex         the root vertex of the remaining DAG.
    * @param vertexConsumer the function to apply.
    * @param traversalOrder which the DFS should be conducted.
-   * @param visited the set of nodes visited.
+   * @param visited        the set of nodes visited.
    */
   void dfsDo(final V vertex,
              final Consumer<V> vertexConsumer,
@@ -180,6 +199,7 @@
 
   /**
    * Function checks whether there is a path between two vertices.
+   *
    * @param v1 First vertex to check.
    * @param v2 Second vertex to check.
    * @return Whether or not there is a path between two vertices.
@@ -188,6 +208,7 @@
 
   /**
    * Checks whether the given vertex is assigned with a wrapping LoopVertex.
+   *
    * @param v Vertex to check.
    * @return whether or not it is wrapped by a LoopVertex
    */
@@ -195,6 +216,7 @@
 
   /**
    * Retrieves the stack depth of the given vertex.
+   *
    * @param v Vertex to check.
    * @return The depth of the stack of LoopVertices for the vertex.
    */
@@ -202,6 +224,7 @@
 
   /**
    * Retrieves the wrapping LoopVertex of the vertex.
+   *
    * @param v Vertex to check.
    * @return The wrapping LoopVertex.
    */
@@ -214,8 +237,9 @@
 
   /**
    * Stores JSON representation of this DAG into a file.
-   * @param directory the directory which JSON representation is saved to
-   * @param name name of this DAG
+   *
+   * @param directory   the directory which JSON representation is saved to
+   * @param name        name of this DAG
    * @param description description of this DAG
    */
   void storeJSON(final String directory, final String name, final String description);
diff --git a/common/src/main/java/org/apache/nemo/common/dag/Edge.java b/common/src/main/java/org/apache/nemo/common/dag/Edge.java
index 32fddde..5c100bf 100644
--- a/common/src/main/java/org/apache/nemo/common/dag/Edge.java
+++ b/common/src/main/java/org/apache/nemo/common/dag/Edge.java
@@ -18,13 +18,13 @@
  */
 package org.apache.nemo.common.dag;
 
-import java.io.Serializable;
-
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.JsonNodeFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.Serializable;
+
 /**
  * Connects two vertices of a DAG.
  * This class can be extended for various DAG representations.
@@ -40,7 +40,8 @@
 
   /**
    * Constructor for Edge.
-   * @param id ID of the edge.
+   *
+   * @param id  ID of the edge.
    * @param src source vertex.
    * @param dst destination vertex.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/dag/Vertex.java b/common/src/main/java/org/apache/nemo/common/dag/Vertex.java
index 1596d4b..a6b07b7 100644
--- a/common/src/main/java/org/apache/nemo/common/dag/Vertex.java
+++ b/common/src/main/java/org/apache/nemo/common/dag/Vertex.java
@@ -42,6 +42,7 @@
   public final String getId() {
     return id;
   }
+
   /**
    * @return the numeric id of the vertex.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/eventhandler/CommonEventHandler.java b/common/src/main/java/org/apache/nemo/common/eventhandler/CommonEventHandler.java
index 515bdf0..14c6a36 100644
--- a/common/src/main/java/org/apache/nemo/common/eventhandler/CommonEventHandler.java
+++ b/common/src/main/java/org/apache/nemo/common/eventhandler/CommonEventHandler.java
@@ -22,11 +22,13 @@
 
 /**
  * Class for handling common events.
+ *
  * @param <T> type of the event to handle.
  */
 public interface CommonEventHandler<T> extends EventHandler<T> {
   /**
    * fetches the class of the event.
+   *
    * @return the class of the event.
    */
   Class<T> getEventClass();
diff --git a/common/src/main/java/org/apache/nemo/common/eventhandler/CompilerEventHandler.java b/common/src/main/java/org/apache/nemo/common/eventhandler/CompilerEventHandler.java
index fccdb48..22ba3e6 100644
--- a/common/src/main/java/org/apache/nemo/common/eventhandler/CompilerEventHandler.java
+++ b/common/src/main/java/org/apache/nemo/common/eventhandler/CompilerEventHandler.java
@@ -20,6 +20,7 @@
 
 /**
  * Class for handling events sent from Compiler.
+ *
  * @param <T> type of the compiler event to handle.
  */
 public interface CompilerEventHandler<T extends CompilerEvent> extends CommonEventHandler<T> {
diff --git a/common/src/main/java/org/apache/nemo/common/eventhandler/RuntimeEventHandler.java b/common/src/main/java/org/apache/nemo/common/eventhandler/RuntimeEventHandler.java
index 3cc28d9..8ac72f9 100644
--- a/common/src/main/java/org/apache/nemo/common/eventhandler/RuntimeEventHandler.java
+++ b/common/src/main/java/org/apache/nemo/common/eventhandler/RuntimeEventHandler.java
@@ -20,6 +20,7 @@
 
 /**
  * Class for handling events sent from Runtime.
+ *
  * @param <T> type of the runtime event to handle.
  */
 public interface RuntimeEventHandler<T extends RuntimeEvent> extends CommonEventHandler<T> {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/ContainerException.java b/common/src/main/java/org/apache/nemo/common/exception/ContainerException.java
index c5a1cfc..02a355a 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/ContainerException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/ContainerException.java
@@ -25,6 +25,7 @@
 public final class ContainerException extends RuntimeException {
   /**
    * ContainerException.
+   *
    * @param cause cause
    */
   public ContainerException(final Throwable cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/IllegalEdgeOperationException.java b/common/src/main/java/org/apache/nemo/common/exception/IllegalEdgeOperationException.java
index 7b78c24..2c9585a 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/IllegalEdgeOperationException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/IllegalEdgeOperationException.java
@@ -27,6 +27,7 @@
 public final class IllegalEdgeOperationException extends RuntimeException {
   /**
    * IllegalEdgeOperationException.
+   *
    * @param cause cause
    */
   public IllegalEdgeOperationException(final Throwable cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/IllegalMessageException.java b/common/src/main/java/org/apache/nemo/common/exception/IllegalMessageException.java
index 3716a8c..54d0d53 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/IllegalMessageException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/IllegalMessageException.java
@@ -25,6 +25,7 @@
 public final class IllegalMessageException extends RuntimeException {
   /**
    * IllegalMessageException.
+   *
    * @param cause cause
    */
   public IllegalMessageException(final Throwable cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/IllegalStateTransitionException.java b/common/src/main/java/org/apache/nemo/common/exception/IllegalStateTransitionException.java
index d454361..6016588 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/IllegalStateTransitionException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/IllegalStateTransitionException.java
@@ -25,6 +25,7 @@
 public final class IllegalStateTransitionException extends Exception {
   /**
    * IllegalStateTransitionException.
+   *
    * @param cause cause
    */
   public IllegalStateTransitionException(final Throwable cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/IllegalVertexOperationException.java b/common/src/main/java/org/apache/nemo/common/exception/IllegalVertexOperationException.java
index cdb4e82..fb990d5 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/IllegalVertexOperationException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/IllegalVertexOperationException.java
@@ -26,6 +26,7 @@
 public final class IllegalVertexOperationException extends RuntimeException {
   /**
    * IllegalVertexOperationException.
+   *
    * @param message message
    */
   public IllegalVertexOperationException(final String message) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/InvalidParameterException.java b/common/src/main/java/org/apache/nemo/common/exception/InvalidParameterException.java
index 70c7551..121c540 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/InvalidParameterException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/InvalidParameterException.java
@@ -25,6 +25,7 @@
 public final class InvalidParameterException extends RuntimeException {
   /**
    * InvalidParameterException.
+   *
    * @param message message
    */
   public InvalidParameterException(final String message) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/JsonParseException.java b/common/src/main/java/org/apache/nemo/common/exception/JsonParseException.java
index f84b931..7047944 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/JsonParseException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/JsonParseException.java
@@ -25,6 +25,7 @@
 public final class JsonParseException extends RuntimeException {
   /**
    * JsonParseException.
+   *
    * @param cause cause
    */
   public JsonParseException(final Throwable cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/MetricException.java b/common/src/main/java/org/apache/nemo/common/exception/MetricException.java
index 93d46fc..0218423 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/MetricException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/MetricException.java
@@ -27,6 +27,7 @@
 
   /**
    * MetricException.
+   *
    * @param cause the cause of the exception.
    */
   public MetricException(final Throwable cause) {
@@ -35,6 +36,7 @@
 
   /**
    * MetricException.
+   *
    * @param cause the cause of the exception.
    */
   public MetricException(final String cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/NodeConnectionException.java b/common/src/main/java/org/apache/nemo/common/exception/NodeConnectionException.java
index 70e9f11..13e9b6d 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/NodeConnectionException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/NodeConnectionException.java
@@ -25,6 +25,7 @@
 public final class NodeConnectionException extends RuntimeException {
   /**
    * NodeConnectionException.
+   *
    * @param cause cause
    */
   public NodeConnectionException(final Throwable cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/PhysicalPlanGenerationException.java b/common/src/main/java/org/apache/nemo/common/exception/PhysicalPlanGenerationException.java
index 3e3eb09..7fdace1 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/PhysicalPlanGenerationException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/PhysicalPlanGenerationException.java
@@ -27,6 +27,7 @@
 public final class PhysicalPlanGenerationException extends RuntimeException {
   /**
    * PhysicalPlanGenerationException.
+   *
    * @param message message
    */
   public PhysicalPlanGenerationException(final String message) {
@@ -35,6 +36,7 @@
 
   /**
    * PhysicalPlanGenerationException.
+   *
    * @param e throwable cause of the exception.
    */
   public PhysicalPlanGenerationException(final Throwable e) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/SchedulingException.java b/common/src/main/java/org/apache/nemo/common/exception/SchedulingException.java
index 40d2c20..72fea09 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/SchedulingException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/SchedulingException.java
@@ -26,6 +26,7 @@
 public final class SchedulingException extends RuntimeException {
   /**
    * SchedulingException.
+   *
    * @param exception exception
    */
   public SchedulingException(final Throwable exception) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/UnknownExecutionStateException.java b/common/src/main/java/org/apache/nemo/common/exception/UnknownExecutionStateException.java
index da6a198..0c32dc9 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/UnknownExecutionStateException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/UnknownExecutionStateException.java
@@ -25,6 +25,7 @@
 public final class UnknownExecutionStateException extends RuntimeException {
   /**
    * UnknownExecutionStateException.
+   *
    * @param cause cause
    */
   public UnknownExecutionStateException(final Throwable cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/UnknownFailureCauseException.java b/common/src/main/java/org/apache/nemo/common/exception/UnknownFailureCauseException.java
index bbaa6ba..bf3b000 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/UnknownFailureCauseException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/UnknownFailureCauseException.java
@@ -25,6 +25,7 @@
 public final class UnknownFailureCauseException extends RuntimeException {
   /**
    * UnknownFailureCauseException.
+   *
    * @param cause cause
    */
   public UnknownFailureCauseException(final Throwable cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/UnrecoverableFailureException.java b/common/src/main/java/org/apache/nemo/common/exception/UnrecoverableFailureException.java
index 9c4ffbb..3f053ae 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/UnrecoverableFailureException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/UnrecoverableFailureException.java
@@ -25,6 +25,7 @@
 public final class UnrecoverableFailureException extends RuntimeException {
   /**
    * UnrecoverableFailureException.
+   *
    * @param cause cause
    */
   public UnrecoverableFailureException(final Throwable cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/UnsupportedBlockStoreException.java b/common/src/main/java/org/apache/nemo/common/exception/UnsupportedBlockStoreException.java
index c45403e..106e0a6 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/UnsupportedBlockStoreException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/UnsupportedBlockStoreException.java
@@ -25,6 +25,7 @@
 public final class UnsupportedBlockStoreException extends RuntimeException {
   /**
    * UnsupportedBlockStoreException.
+   *
    * @param cause cause
    */
   public UnsupportedBlockStoreException(final Throwable cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/UnsupportedCommPatternException.java b/common/src/main/java/org/apache/nemo/common/exception/UnsupportedCommPatternException.java
index d801090..32adee2 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/UnsupportedCommPatternException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/UnsupportedCommPatternException.java
@@ -25,6 +25,7 @@
 public final class UnsupportedCommPatternException extends RuntimeException {
   /**
    * UnsupportedCommPatternException.
+   *
    * @param cause cause
    */
   public UnsupportedCommPatternException(final Throwable cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/UnsupportedExecutionPropertyException.java b/common/src/main/java/org/apache/nemo/common/exception/UnsupportedExecutionPropertyException.java
index 8647ec6..4abc646 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/UnsupportedExecutionPropertyException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/UnsupportedExecutionPropertyException.java
@@ -25,6 +25,7 @@
 public final class UnsupportedExecutionPropertyException extends RuntimeException {
   /**
    * UnsupportedExecutionPropertyException.
+   *
    * @param message message
    */
   public UnsupportedExecutionPropertyException(final String message) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/UnsupportedMethodException.java b/common/src/main/java/org/apache/nemo/common/exception/UnsupportedMethodException.java
index 7919a38..88b1091 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/UnsupportedMethodException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/UnsupportedMethodException.java
@@ -25,6 +25,7 @@
 public final class UnsupportedMethodException extends RuntimeException {
   /**
    * UnsupportedMethodException.
+   *
    * @param message message
    */
   public UnsupportedMethodException(final String message) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/UnsupportedMetricException.java b/common/src/main/java/org/apache/nemo/common/exception/UnsupportedMetricException.java
index 4870b82..9628b27 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/UnsupportedMetricException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/UnsupportedMetricException.java
@@ -25,6 +25,7 @@
 public final class UnsupportedMetricException extends RuntimeException {
   /**
    * UnsupportedMetricException.
+   *
    * @param cause cause
    */
   public UnsupportedMetricException(final Throwable cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/exception/UnsupportedPartitionerException.java b/common/src/main/java/org/apache/nemo/common/exception/UnsupportedPartitionerException.java
index ec0db17..749795c 100644
--- a/common/src/main/java/org/apache/nemo/common/exception/UnsupportedPartitionerException.java
+++ b/common/src/main/java/org/apache/nemo/common/exception/UnsupportedPartitionerException.java
@@ -25,6 +25,7 @@
 public final class UnsupportedPartitionerException extends RuntimeException {
   /**
    * UnsupportedPartitionerException.
+   *
    * @param cause cause
    */
   public UnsupportedPartitionerException(final Throwable cause) {
diff --git a/common/src/main/java/org/apache/nemo/common/ir/BoundedIteratorReadable.java b/common/src/main/java/org/apache/nemo/common/ir/BoundedIteratorReadable.java
index 586fc47..c1085fe 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/BoundedIteratorReadable.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/BoundedIteratorReadable.java
@@ -22,6 +22,7 @@
 
 /**
  * An abstract readable class that retrieves data from an iterator.
+ *
  * @param <O> output type.
  */
 public abstract class BoundedIteratorReadable<O> implements Readable<O> {
@@ -30,6 +31,7 @@
 
   /**
    * Initialize iterator.
+   *
    * @return iterator
    */
   protected abstract Iterator<O> initializeIterator();
diff --git a/common/src/main/java/org/apache/nemo/common/ir/IRDAG.java b/common/src/main/java/org/apache/nemo/common/ir/IRDAG.java
index 7e37817..29af0a2 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/IRDAG.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/IRDAG.java
@@ -55,12 +55,12 @@
  * An IRDAG object captures a high-level data processing application (e.g., Spark/Beam application).
  * - IRVertex: A data-parallel operation. (e.g., map)
  * - IREdge: A data dependency between two operations. (e.g., shuffle)
- *
+ * <p>
  * Largely two types of IRDAG optimization(modification) methods are provided.
  * All of these methods preserve application semantics.
  * - Annotation: setProperty(), getPropertyValue() on each IRVertex/IREdge
  * - Reshaping: insert(), delete() on the IRDAG
- *
+ * <p>
  * TODO #341: Rethink IRDAG insert() signatures
  */
 @NotThreadSafe
@@ -98,6 +98,7 @@
 
   /**
    * Used internally by Nemo to advance the DAG snapshot after applying each pass.
+   *
    * @param checker that compares the dagSnapshot and the modifiedDAG
    *                to determine if the snapshot can be set to the current modifiedDAG.
    * @return true if the checker passes, false otherwise.
@@ -121,7 +122,7 @@
   /**
    * Deletes a previously inserted utility vertex.
    * (e.g., MessageBarrierVertex, StreamVertex, SamplingVertex)
-   *
+   * <p>
    * Notice that the actual number of vertices that will be deleted after this call returns can be more than one.
    * We roll back the changes made with the previous insert(), while preserving application semantics.
    *
@@ -155,13 +156,13 @@
   /**
    * Delete a group of vertices that corresponds to the specified vertex.
    * And then recursively delete neighboring utility vertices.
-   *
+   * <p>
    * (WARNING) Only call this method inside delete(), or inside this method itself.
    * This method uses buildWithoutSourceSinkCheck() for intermediate DAGs,
    * which will be finally checked in delete().
    *
    * @param vertexToDelete to delete
-   * @param visited vertex groups (because cyclic dependencies between vertex groups are possible)
+   * @param visited        vertex groups (because cyclic dependencies between vertex groups are possible)
    */
   private void deleteRecursively(final IRVertex vertexToDelete, final Set<IRVertex> visited) {
     if (!Util.isUtilityVertex(vertexToDelete)) {
@@ -203,8 +204,9 @@
           modifiedDAG.getIncomingEdgesOf(vertexToDelete).stream()
             .filter(e -> !Util.isControlEdge(e))
             .map(IREdge::getSrc)
-            .forEach(srcVertex-> { builder.connectVertices(
-              Util.cloneEdge(streamVertexToOriginalEdge.get(vertexToDelete), srcVertex, dstVertex));
+            .forEach(srcVertex -> {
+              builder.connectVertices(
+                Util.cloneEdge(streamVertexToOriginalEdge.get(vertexToDelete), srcVertex, dstVertex));
             });
         });
       modifiedDAG = builder.buildWithoutSourceSinkCheck();
@@ -237,14 +239,14 @@
 
   /**
    * Inserts a new vertex that streams data.
-   *
+   * <p>
    * Before: src - edgeToStreamize - dst
    * After: src - edgeToStreamizeWithNewDestination - streamVertex - oneToOneEdge - dst
    * (replaces the "Before" relationships)
-   *
+   * <p>
    * This preserves semantics as the streamVertex simply forwards data elements from the input edge to the output edge.
    *
-   * @param streamVertex to insert.
+   * @param streamVertex    to insert.
    * @param edgeToStreamize to modify.
    */
   public void insert(final StreamVertex streamVertex, final IREdge edgeToStreamize) {
@@ -319,24 +321,24 @@
 
   /**
    * Inserts a new vertex that analyzes intermediate data, and triggers a dynamic optimization.
-   *
+   * <p>
    * For each edge in edgesToGetStatisticsOf...
-   *
+   * <p>
    * Before: src - edge - dst
    * After: src - oneToOneEdge(a clone of edge) - messageBarrierVertex -
-   *        shuffleEdge - messageAggregatorVertex - broadcastEdge - dst
+   * shuffleEdge - messageAggregatorVertex - broadcastEdge - dst
    * (the "Before" relationships are unmodified)
-   *
+   * <p>
    * This preserves semantics as the results of the inserted message vertices are never consumed by the original IRDAG.
-   *
+   * <p>
    * TODO #345: Simplify insert(MessageBarrierVertex)
    *
-   * @param messageBarrierVertex to insert.
+   * @param messageBarrierVertex    to insert.
    * @param messageAggregatorVertex to insert.
-   * @param mbvOutputEncoder to use.
-   * @param mbvOutputDecoder to use.
-   * @param edgesToGetStatisticsOf to examine.
-   * @param edgesToOptimize to optimize.
+   * @param mbvOutputEncoder        to use.
+   * @param mbvOutputDecoder        to use.
+   * @param edgesToGetStatisticsOf  to examine.
+   * @param edgesToOptimize         to optimize.
    */
   public void insert(final MessageBarrierVertex messageBarrierVertex,
                      final MessageAggregatorVertex messageAggregatorVertex,
@@ -430,29 +432,29 @@
 
   /**
    * Inserts a set of samplingVertices that process sampled data.
-   *
+   * <p>
    * This method automatically inserts the following three types of edges.
    * (1) Edges between samplingVertices to reflect the original relationship
    * (2) Edges from the original IRDAG to samplingVertices that clone the inEdges of the original vertices
    * (3) Edges from the samplingVertices to the original IRDAG to respect executeAfterSamplingVertices
-   *
+   * <p>
    * Suppose the caller supplies the following arguments to perform a "sampled run" of vertices {V1, V2},
    * prior to executing them.
    * - samplingVertices: {V1', V2'}
    * - childrenOfSamplingVertices: {V1}
-   *
+   * <p>
    * Before: V1 - oneToOneEdge - V2 - shuffleEdge - V3
    * After: V1' - oneToOneEdge - V2' - controlEdge - V1 - oneToOneEdge - V2 - shuffleEdge - V3
-   *
+   * <p>
    * This preserves semantics as the original IRDAG remains unchanged and unaffected.
-   *
+   * <p>
    * (Future calls to insert() can add new vertices that connect to sampling vertices. Such new vertices will also be
    * wrapped with sampling vertices, as new vertices that consume outputs from sampling vertices will process
    * a subset of data anyways, and no such new vertex will reach the original DAG except via control edges)
-   *
+   * <p>
    * TODO #343: Extend SamplingVertex control edges
    *
-   * @param toInsert sampling vertices.
+   * @param toInsert     sampling vertices.
    * @param executeAfter that must be executed after toInsert.
    */
   public void insert(final Set<SamplingVertex> toInsert,
@@ -523,6 +525,7 @@
   /**
    * Reshape unsafely, without guarantees on preserving application semantics.
    * TODO #330: Refactor Unsafe Reshaping Passes
+   *
    * @param unsafeReshapingFunction takes as input the underlying DAG, and outputs a reshaped DAG.
    */
   public void reshapeUnsafely(final Function<DAG<IRVertex, IREdge>, DAG<IRVertex, IREdge>> unsafeReshapingFunction) {
@@ -567,8 +570,8 @@
   }
 
   /**
-   * @param mbv src.
-   * @param mav dst.
+   * @param mbv     src.
+   * @param mav     dst.
    * @param encoder src-dst encoder.
    * @param decoder src-dst decoder.
    * @return the edge.
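The IRDAG Javadoc above distinguishes Annotation (setProperty()/getPropertyValue()) from Reshaping (insert()/delete()). As a minimal annotation sketch, not part of this commit: it assumes `dag` is an IRDAG exposing topologicalDo()/getIncomingEdgesOf() from the DAG it wraps, and uses the property factories visible in this diff.

```java
// Illustrative only: annotate every incoming edge with a Pull data-flow policy.
// Annotation changes execution properties but never the graph shape or application semantics.
dag.topologicalDo(vertex ->
  dag.getIncomingEdgesOf(vertex).forEach(edge ->
    edge.setProperty(DataFlowProperty.of(DataFlowProperty.Value.Pull))));
```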
diff --git a/common/src/main/java/org/apache/nemo/common/ir/IdManager.java b/common/src/main/java/org/apache/nemo/common/ir/IdManager.java
index 389af8b..1aad410 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/IdManager.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/IdManager.java
@@ -53,7 +53,8 @@
   /**
    * Save the vertex id for the vertices that can be cloned later on.
    * WARN: this should guarantee that the vertex is no longer used, otherwise, it would result in duplicate IDs.
-   * @param v the original vertex that is to be cloned later on (RootLoopVertex's vertex).
+   *
+   * @param v  the original vertex that is to be cloned later on (RootLoopVertex's vertex).
    * @param id The IDs of the identical vertices.
    */
   public static void saveVertexId(final Vertex v, final String id) {
@@ -66,6 +67,7 @@
    * otherwise simply acts as the newVertexId method.
    * WARN: the #saveVertexId method should no longer use the ID saved at that moment,
    * in order for this method to work correctly.
+   *
    * @param v the vertex to get the ID for.
    * @return the ID for the vertex.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/ir/OutputCollector.java b/common/src/main/java/org/apache/nemo/common/ir/OutputCollector.java
index 3fe333a..0865093 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/OutputCollector.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/OutputCollector.java
@@ -26,17 +26,20 @@
  * Interface through which Transform emits outputs.
  * This is to be implemented in the runtime with
  * runtime-specific distributed data movement and storage mechanisms.
+ *
  * @param <O> output type.
  */
 public interface OutputCollector<O> extends Serializable {
   /**
    * Single-destination emit.
+   *
    * @param output value.
    */
   void emit(O output);
 
   /**
    * Emit watermark to downstream vertices.
+   *
    * @param watermark watermark
    */
   void emitWatermark(Watermark watermark);
@@ -45,9 +48,10 @@
    * Multi-destination emit.
    * Currently unused, but might come in handy
    * for operations like multi-output map.
+   *
    * @param dstVertexId destination vertex id.
-   * @param output value.
-   * @param <T> output type.
+   * @param output      value.
+   * @param <T>         output type.
    */
   <T> void emit(String dstVertexId, T output);
 }
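Since the full OutputCollector interface is visible above, here is a hedged sketch of an implementation that simply buffers emitted elements; the package path of Watermark is an assumption.

```java
import org.apache.nemo.common.ir.OutputCollector;
import org.apache.nemo.common.punctuation.Watermark;  // assumed package path

import java.util.ArrayList;
import java.util.List;

/**
 * Buffers emitted elements in memory; illustrative only.
 */
public final class BufferingOutputCollector<O> implements OutputCollector<O> {
  private final List<O> buffer = new ArrayList<>();

  @Override
  public void emit(final O output) {
    buffer.add(output);  // single-destination emit
  }

  @Override
  public void emitWatermark(final Watermark watermark) {
    // no-op: this sketch does not forward watermarks downstream
  }

  @Override
  public <T> void emit(final String dstVertexId, final T output) {
    // multi-destination emit is documented above as currently unused
    throw new UnsupportedOperationException(dstVertexId);
  }

  public List<O> getBuffer() {
    return buffer;
  }
}
```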
diff --git a/common/src/main/java/org/apache/nemo/common/ir/Readable.java b/common/src/main/java/org/apache/nemo/common/ir/Readable.java
index c18c9e1..63a6b54 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/Readable.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/Readable.java
@@ -25,6 +25,7 @@
 
 /**
  * Interface for readable.
+ *
  * @param <O> output type.
  */
 public interface Readable<O> extends Serializable {
@@ -38,8 +39,9 @@
    * Method to read current data from the source.
    * The caller should check whether the Readable is finished or not by using the isFinished() method
    * before calling this method.
-   *
+   * <p>
    * In an unbounded source, it can throw NoSuchElementException even though it is not finished.
+   *
    * @return a data read by the readable.
    * @throws NoSuchElementException when no element exists
    */
@@ -47,6 +49,7 @@
 
   /**
    * Read watermark.
+   *
    * @return watermark
    */
   long readWatermark();
@@ -68,6 +71,7 @@
 
   /**
    * Close.
+   *
    * @throws IOException if file-based reader throws any.
    */
   void close() throws IOException;
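A usage sketch of the Readable contract documented above (readCurrent()'s exact signature is assumed from its Javadoc): check isFinished() before each read, tolerate NoSuchElementException from unbounded sources, and always close().

```java
import org.apache.nemo.common.ir.Readable;

import java.io.IOException;
import java.util.NoSuchElementException;

final class ReadableDrain {
  private ReadableDrain() {
  }

  static <O> void drain(final Readable<O> readable) throws IOException {
    try {
      while (!readable.isFinished()) {
        try {
          final O element = readable.readCurrent();
          System.out.println(element);  // stand-in for real downstream processing
        } catch (final NoSuchElementException e) {
          break;  // an unbounded source may have no element right now despite not being finished
        }
      }
    } finally {
      readable.close();
    }
  }
}
```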
diff --git a/common/src/main/java/org/apache/nemo/common/ir/edge/IREdge.java b/common/src/main/java/org/apache/nemo/common/ir/edge/IREdge.java
index 969fd5b..a7774fb 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/edge/IREdge.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/edge/IREdge.java
@@ -20,6 +20,7 @@
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.nemo.common.dag.Edge;
 import org.apache.nemo.common.ir.IdManager;
 import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
@@ -27,10 +28,8 @@
 import org.apache.nemo.common.ir.executionproperty.ExecutionPropertyMap;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 
-import org.apache.commons.lang3.builder.HashCodeBuilder;
-
 import java.io.Serializable;
-import java.util.*;
+import java.util.Optional;
 
 /**
  * Physical execution plan of intermediate data movement.
@@ -54,6 +53,7 @@
 
   /**
    * Set an executionProperty of the IREdge.
+   *
    * @param executionProperty the execution property.
    * @return the IREdge with the execution property set.
    */
@@ -64,6 +64,7 @@
 
   /**
    * Set an executionProperty of the IREdge, permanently.
+   *
    * @param executionProperty the execution property.
    * @return the IREdge with the execution property set.
    */
@@ -80,7 +81,7 @@
    * @return the execution property.
    */
   public <T extends Serializable> Optional<T> getPropertyValue(
-      final Class<? extends EdgeExecutionProperty<T>> executionPropertyKey) {
+    final Class<? extends EdgeExecutionProperty<T>> executionPropertyKey) {
     return executionProperties.get(executionPropertyKey);
   }
 
@@ -125,10 +126,10 @@
   @Override
   public int hashCode() {
     return new HashCodeBuilder(17, 37)
-        .append(getSrc().hashCode())
-        .append(getDst().hashCode())
-        .append(executionProperties)
-        .toHashCode();
+      .append(getSrc().hashCode())
+      .append(getDst().hashCode())
+      .append(executionProperties)
+      .toHashCode();
   }
 
   @Override
diff --git a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/AdditionalOutputTagProperty.java b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/AdditionalOutputTagProperty.java
index 4c85cec..422fb00 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/AdditionalOutputTagProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/AdditionalOutputTagProperty.java
@@ -27,6 +27,7 @@
 
   /**
    * Constructor.
+   *
    * @param value tag id of additional input.
    */
   private AdditionalOutputTagProperty(final String value) {
@@ -35,6 +36,7 @@
 
   /**
    * Static method exposing constructor.
+   *
    * @param value tag id of additional input.
    * @return the newly created execution property.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/CommunicationPatternProperty.java b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/CommunicationPatternProperty.java
index 7f35c81..a6971b9 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/CommunicationPatternProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/CommunicationPatternProperty.java
@@ -21,13 +21,15 @@
 import org.apache.nemo.common.ir.executionproperty.EdgeExecutionProperty;
 
 // TODO #492: modularizing runtime components for data communication pattern.
+
 /**
  * DataCommunicationPattern ExecutionProperty.
  */
 public final class CommunicationPatternProperty
-    extends EdgeExecutionProperty<CommunicationPatternProperty.Value> {
+  extends EdgeExecutionProperty<CommunicationPatternProperty.Value> {
   /**
    * Constructor.
+   *
    * @param value value of the execution property.
    */
   private CommunicationPatternProperty(final Value value) {
@@ -36,6 +38,7 @@
 
   /**
    * Static method exposing the constructor.
+   *
    * @param value value of the new execution property.
    * @return the newly created execution property.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DataFlowProperty.java b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DataFlowProperty.java
index eccb120..5c8de19 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DataFlowProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DataFlowProperty.java
@@ -26,6 +26,7 @@
 public final class DataFlowProperty extends EdgeExecutionProperty<DataFlowProperty.Value> {
   /**
    * Constructor.
+   *
    * @param value value of the execution property.
    */
   private DataFlowProperty(final Value value) {
@@ -34,6 +35,7 @@
 
   /**
    * Static method exposing the constructor.
+   *
    * @param value value of the new execution property.
    * @return the newly created execution property.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DataPersistenceProperty.java b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DataPersistenceProperty.java
index b4bccee..51bbfcb 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DataPersistenceProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DataPersistenceProperty.java
@@ -26,6 +26,7 @@
 public final class DataPersistenceProperty extends EdgeExecutionProperty<DataPersistenceProperty.Value> {
   /**
    * Constructor.
+   *
    * @param value value of the execution property.
    */
   private DataPersistenceProperty(final Value value) {
@@ -34,6 +35,7 @@
 
   /**
    * Static method exposing the constructor.
+   *
    * @param value value of the new execution property.
    * @return the newly created execution property.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DataStoreProperty.java b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DataStoreProperty.java
index e53157b..c2dc191 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DataStoreProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DataStoreProperty.java
@@ -26,6 +26,7 @@
 public final class DataStoreProperty extends EdgeExecutionProperty<DataStoreProperty.Value> {
   /**
    * Constructor.
+   *
    * @param value value of the execution property.
    */
   private DataStoreProperty(final Value value) {
@@ -34,6 +35,7 @@
 
   /**
    * Static method exposing the constructor.
+   *
    * @param value value of the new execution property.
    * @return the newly created execution property.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DuplicateEdgeGroupProperty.java b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DuplicateEdgeGroupProperty.java
index 61f3635..68facb8 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DuplicateEdgeGroupProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/DuplicateEdgeGroupProperty.java
@@ -27,6 +27,7 @@
 public final class DuplicateEdgeGroupProperty extends EdgeExecutionProperty<DuplicateEdgeGroupPropertyValue> {
   /**
    * Constructor.
+   *
    * @param value value of the execution property.
    */
   private DuplicateEdgeGroupProperty(final DuplicateEdgeGroupPropertyValue value) {
@@ -35,6 +36,7 @@
 
   /**
    * Static method exposing the constructor.
+   *
    * @param value value of the new execution property.
    * @return the newly created execution property.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/MessageIdEdgeProperty.java b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/MessageIdEdgeProperty.java
index 4a5d320..3878c9a 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/MessageIdEdgeProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/MessageIdEdgeProperty.java
@@ -28,6 +28,7 @@
 public final class MessageIdEdgeProperty extends EdgeExecutionProperty<HashSet<Integer>> {
   /**
    * Constructor.
+   *
    * @param value value of the execution property.
    */
   private MessageIdEdgeProperty(final HashSet<Integer> value) {
@@ -36,6 +37,7 @@
 
   /**
    * Static method exposing the constructor.
+   *
    * @param value value of the new execution property.
    * @return the newly created execution property.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/PartitionSetProperty.java b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/PartitionSetProperty.java
index 15b291a..ec06555 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/PartitionSetProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/PartitionSetProperty.java
@@ -26,14 +26,14 @@
 /**
  * This property decides which partitions the tasks of the destination IRVertex should fetch.
  * The position of a KeyRange in the list corresponds to the offset of the destination task.
- *
+ * <p>
  * For example, in the following setup:
  * Source IRVertex (Parallelism=2) - IREdge (Partitioner.Num=4) - Destination IRVertex (Parallelism=2)
- *
+ * <p>
  * Setting PartitionSetProperty([0, 3), [3, 4)) on the IREdge will enforce the following behaviors.
  * - The first destination task fetches the first 3 partitions from each of the 2 data blocks
  * - The second destination task fetches the last partition from each of the 2 data blocks
- *
+ * <p>
  * This property is useful for handling data skews.
  * For example, if the size ratios of the 4 partitions in the above setup are (17%, 16%, 17%, 50%),
  * then each of the destination tasks will evenly handle 50% of the load.
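To make the fetch rule above concrete, a plain-Java illustration (deliberately avoiding Nemo's KeyRange API) of the [0, 3) / [3, 4) setup from the Javadoc:

```java
// Each destination task t fetches the partitions in its range [start, end) from every block.
final int numSourceBlocks = 2;              // source parallelism
final int[][] keyRanges = {{0, 3}, {3, 4}}; // one [start, end) per destination task
for (int task = 0; task < keyRanges.length; task++) {
  for (int block = 0; block < numSourceBlocks; block++) {
    for (int p = keyRanges[task][0]; p < keyRanges[task][1]; p++) {
      System.out.printf("dst task %d fetches partition %d of block %d%n", task, p, block);
    }
  }
}
```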
diff --git a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/PartitionerProperty.java b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/PartitionerProperty.java
index 78f321a..4da595a 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/PartitionerProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/edge/executionproperty/PartitionerProperty.java
@@ -48,7 +48,7 @@
   }
 
   /**
-   * @param type of the partitioner.
+   * @param type            of the partitioner.
    * @param numOfPartitions to create.
    * @return the property.
    */
@@ -57,9 +57,9 @@
   }
 
   /**
-   * @param type of the partitioner.
+   * @param type            of the partitioner.
    * @param numOfPartitions to create.
-   * @param auto if the number of partitions is auto.
+   * @param auto            if the number of partitions is auto.
    * @return the property.
    */
   private static PartitionerProperty of(final Type type, final int numOfPartitions, final boolean auto) {
diff --git a/common/src/main/java/org/apache/nemo/common/ir/executionproperty/EdgeExecutionProperty.java b/common/src/main/java/org/apache/nemo/common/ir/executionproperty/EdgeExecutionProperty.java
index 6dfdfe9..0b8647c 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/executionproperty/EdgeExecutionProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/executionproperty/EdgeExecutionProperty.java
@@ -22,11 +22,13 @@
 
 /**
  * {@link ExecutionProperty} for {@link org.apache.nemo.common.ir.edge.IREdge}.
+ *
  * @param <T> Type of the value.
  */
 public abstract class EdgeExecutionProperty<T extends Serializable> extends ExecutionProperty<T> {
   /**
    * Default constructor.
+   *
    * @param value value of the EdgeExecutionProperty.
    */
   public EdgeExecutionProperty(final T value) {
diff --git a/common/src/main/java/org/apache/nemo/common/ir/executionproperty/ExecutionProperty.java b/common/src/main/java/org/apache/nemo/common/ir/executionproperty/ExecutionProperty.java
index 008d13c..a73034d 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/executionproperty/ExecutionProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/executionproperty/ExecutionProperty.java
@@ -22,6 +22,7 @@
 
 /**
  * An abstract class for each execution factor.
+ *
  * @param <T> Type of the value.
  */
 public abstract class ExecutionProperty<T extends Serializable> implements Serializable {
@@ -29,6 +30,7 @@
 
   /**
    * Default constructor.
+   *
    * @param value value of the ExecutionProperty.
    */
   public ExecutionProperty(final T value) {
diff --git a/common/src/main/java/org/apache/nemo/common/ir/executionproperty/ExecutionPropertyMap.java b/common/src/main/java/org/apache/nemo/common/ir/executionproperty/ExecutionPropertyMap.java
index 2d2e8ad..8a36b94 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/executionproperty/ExecutionPropertyMap.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/executionproperty/ExecutionPropertyMap.java
@@ -20,6 +20,8 @@
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.nemo.common.coder.DecoderFactory;
 import org.apache.nemo.common.coder.EncoderFactory;
 import org.apache.nemo.common.exception.CompileTimeOptimizationException;
@@ -28,9 +30,6 @@
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.vertex.executionproperty.ResourcePriorityProperty;
 
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
-
 import javax.annotation.concurrent.NotThreadSafe;
 import java.io.Serializable;
 import java.util.*;
@@ -40,6 +39,7 @@
 
 /**
  * ExecutionPropertyMap Class, which uses HashMap for keeping track of ExecutionProperties for vertices and edges.
+ *
  * @param <T> Type of {@link ExecutionProperty} this map stores.
  */
 @NotThreadSafe
@@ -50,6 +50,7 @@
 
   /**
    * Constructor for ExecutionPropertyMap class.
+   *
    * @param id ID of the vertex / edge to keep the execution property of.
    */
   @VisibleForTesting
@@ -59,13 +60,14 @@
 
   /**
    * Static initializer for irEdges.
-   * @param irEdge irEdge to keep the execution property of.
+   *
+   * @param irEdge      irEdge to keep the execution property of.
    * @param commPattern Data communication pattern type of the edge.
    * @return The corresponding ExecutionPropertyMap.
    */
   public static ExecutionPropertyMap<EdgeExecutionProperty> of(
-      final IREdge irEdge,
-      final CommunicationPatternProperty.Value commPattern) {
+    final IREdge irEdge,
+    final CommunicationPatternProperty.Value commPattern) {
     final ExecutionPropertyMap<EdgeExecutionProperty> map = new ExecutionPropertyMap<>(irEdge.getId());
     map.put(CommunicationPatternProperty.of(commPattern));
     map.put(EncoderProperty.of(EncoderFactory.DUMMY_ENCODER_FACTORY));
@@ -94,6 +96,7 @@
 
   /**
    * Static initializer for irVertex.
+   *
    * @param irVertex irVertex to keep the execution property of.
    * @return The corresponding ExecutionPropertyMap.
    */
@@ -105,6 +108,7 @@
 
   /**
    * ID of the item this ExecutionPropertyMap class is keeping track of.
+   *
    * @return the ID of the item this ExecutionPropertyMap class is keeping track of.
    */
   public String getId() {
@@ -113,6 +117,7 @@
 
   /**
    * Put the given execution property in the ExecutionPropertyMap. By default, it does not finalize the property.
+   *
    * @param executionProperty execution property to insert.
    * @return the previous execution property, or null if there was no execution property
    * with the specified property key.
@@ -123,19 +128,20 @@
 
   /**
    * Put the given execution property in the ExecutionPropertyMap.
+   *
    * @param executionProperty execution property to insert.
-   * @param finalize whether or not to finalize the execution property.
+   * @param finalize          whether or not to finalize the execution property.
    * @return the previous execution property, or null if there was no execution property
    * with the specified property key.
    */
   public T put(final T executionProperty, final Boolean finalize) {
     // check if the property has been already finalized. We don't mind overwriting an identical value.
     if (finalizedProperties.contains(executionProperty.getClass())
-        && properties.get(executionProperty.getClass()) != null
-        && !properties.get(executionProperty.getClass()).equals(executionProperty)) {
+      && properties.get(executionProperty.getClass()) != null
+      && !properties.get(executionProperty.getClass()).equals(executionProperty)) {
       throw new CompileTimeOptimizationException("Trying to overwrite a finalized execution property ["
-          + executionProperty.getClass().getSimpleName() + "] from ["
-          + properties.get(executionProperty.getClass()).getValue() + "] to [" + executionProperty.getValue() + "]");
+        + executionProperty.getClass().getSimpleName() + "] from ["
+        + properties.get(executionProperty.getClass()).getValue() + "] to [" + executionProperty.getValue() + "]");
     }
 
     // start the actual put process.
@@ -147,7 +153,8 @@
 
   /**
    * Get the value of the given execution property type.
-   * @param <U> Type of the return value.
+   *
+   * @param <U>                  Type of the return value.
    * @param executionPropertyKey the execution property type to find the value of.
    * @return the value of the given execution property.
    */
@@ -158,6 +165,7 @@
 
   /**
    * Remove the execution property.
+   *
    * @param key key of the execution property to remove.
    * @return the removed execution property
    */
@@ -175,6 +183,7 @@
 
   /**
    * Same as forEach function in Java 8, but for execution properties.
+   *
    * @param action action to apply to each of the execution properties.
    */
   public void forEachProperties(final Consumer<? super T> action) {
@@ -216,13 +225,13 @@
     }
     final ExecutionPropertyMap that = (ExecutionPropertyMap) obj;
     return properties.values().stream().collect(Collectors.toSet())
-        .equals(that.properties.values().stream().collect(Collectors.toSet()));
+      .equals(that.properties.values().stream().collect(Collectors.toSet()));
   }
 
   @Override
   public int hashCode() {
     return new HashCodeBuilder(17, 37)
-        .append(properties.values().stream().map(ExecutionProperty::getValue).collect(Collectors.toSet()))
-        .toHashCode();
+      .append(properties.values().stream().map(ExecutionProperty::getValue).collect(Collectors.toSet()))
+      .toHashCode();
   }
 }
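The finalize semantics of put(executionProperty, finalize) above, as a hedged sketch; `edge` is an assumed IREdge, and the property factories and enum values follow the ones appearing elsewhere in this diff:

```java
final ExecutionPropertyMap<EdgeExecutionProperty> map =
  ExecutionPropertyMap.of(edge, CommunicationPatternProperty.Value.Shuffle);
map.put(DataFlowProperty.of(DataFlowProperty.Value.Pull), true);   // put and finalize
map.put(DataFlowProperty.of(DataFlowProperty.Value.Pull), false);  // identical value: allowed
// Overwriting a finalized property with a different value throws CompileTimeOptimizationException:
map.put(DataFlowProperty.of(DataFlowProperty.Value.Push), false);
```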
diff --git a/common/src/main/java/org/apache/nemo/common/ir/executionproperty/VertexExecutionProperty.java b/common/src/main/java/org/apache/nemo/common/ir/executionproperty/VertexExecutionProperty.java
index a5fabd8..aa04dc0 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/executionproperty/VertexExecutionProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/executionproperty/VertexExecutionProperty.java
@@ -22,11 +22,13 @@
 
 /**
  * {@link ExecutionProperty} for {@link org.apache.nemo.common.ir.vertex.IRVertex}.
+ *
  * @param <T> Type of the value.
  */
 public abstract class VertexExecutionProperty<T extends Serializable> extends ExecutionProperty<T> {
   /**
    * Default constructor.
+   *
    * @param value value of the VertexExecutionProperty.
    */
   public VertexExecutionProperty(final T value) {
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/CachedSourceVertex.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/CachedSourceVertex.java
index 331a635..d5ff4b5 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/CachedSourceVertex.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/CachedSourceVertex.java
@@ -27,6 +27,7 @@
 /**
  * Bounded source vertex for cached data.
  * It does not have actual data but just wraps the cached input data.
+ *
  * @param <T> the type of data to emit.
  */
 public final class CachedSourceVertex<T> extends SourceVertex<T> {
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/IRVertex.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/IRVertex.java
index 378fc40..4a3c6df 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/IRVertex.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/IRVertex.java
@@ -20,11 +20,11 @@
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.nemo.common.Cloneable;
+import org.apache.nemo.common.dag.Vertex;
 import org.apache.nemo.common.ir.IdManager;
 import org.apache.nemo.common.ir.executionproperty.ExecutionPropertyMap;
-import org.apache.nemo.common.dag.Vertex;
 import org.apache.nemo.common.ir.executionproperty.VertexExecutionProperty;
-import org.apache.nemo.common.Cloneable;
 
 import java.io.Serializable;
 import java.util.Optional;
@@ -57,6 +57,7 @@
 
   /**
    * Static function to copy executionProperties from one vertex to another.
+   *
    * @param thatVertex the vertex to copy executionProperties to.
    */
   public final void copyExecutionPropertiesTo(final IRVertex thatVertex) {
@@ -87,12 +88,13 @@
 
   /**
    * Get the executionProperty of the IRVertex.
-   * @param <T> Type of the return value.
+   *
+   * @param <T>                  Type of the return value.
    * @param executionPropertyKey key of the execution property.
    * @return the execution property.
    */
   public final <T extends Serializable> Optional<T> getPropertyValue(
-      final Class<? extends VertexExecutionProperty<T>> executionPropertyKey) {
+    final Class<? extends VertexExecutionProperty<T>> executionPropertyKey) {
     return executionProperties.get(executionPropertyKey);
   }
 
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/InMemorySourceVertex.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/InMemorySourceVertex.java
index 43936d8..1cf4e7d 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/InMemorySourceVertex.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/InMemorySourceVertex.java
@@ -18,9 +18,9 @@
  */
 package org.apache.nemo.common.ir.vertex;
 
+import org.apache.nemo.common.Util;
 import org.apache.nemo.common.ir.BoundedIteratorReadable;
 import org.apache.nemo.common.ir.Readable;
-import org.apache.nemo.common.Util;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -29,6 +29,7 @@
 
 /**
  * Source vertex with the data in memory.
+ *
  * @param <T> type of data.
  */
 public final class InMemorySourceVertex<T> extends SourceVertex<T> {
@@ -100,6 +101,7 @@
 
   /**
    * Simply returns the in-memory data.
+   *
    * @param <T> type of the data.
    */
   private static final class InMemorySourceReadable<T> extends BoundedIteratorReadable<T> {
@@ -108,6 +110,7 @@
 
     /**
      * Constructor.
+     *
      * @param initializedSourceData the source data.
      */
     private InMemorySourceReadable(final Iterable<T> initializedSourceData) {
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/LoopVertex.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/LoopVertex.java
index b7a29e6..0f6ea2e 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/LoopVertex.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/LoopVertex.java
@@ -30,7 +30,10 @@
 import org.apache.nemo.common.ir.edge.executionproperty.DuplicateEdgeGroupPropertyValue;
 
 import java.io.Serializable;
-import java.util.*;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
 import java.util.function.IntPredicate;
 
 /**
@@ -118,7 +121,8 @@
 
   /**
    * Maps an edge from/to the loop with the corresponding edge from/to the internal vertex.
-   * @param edgeWithLoop an edge from/to loop
+   *
+   * @param edgeWithLoop           an edge from/to loop
    * @param edgeWithInternalVertex the corresponding edge from/to internal vertex
    */
   public void mapEdgeWithLoop(final IREdge edgeWithLoop, final IREdge edgeWithInternalVertex) {
@@ -136,6 +140,7 @@
 
   /**
    * Adds the incoming edge of the contained DAG.
+   *
    * @param edge edge to add.
    */
   public void addDagIncomingEdge(final IREdge edge) {
@@ -152,12 +157,14 @@
 
   /**
    * Adds an iterative incoming edge, from the previous iteration, but connected internally.
+   *
    * @param edge edge to add.
    */
   public void addIterativeIncomingEdge(final IREdge edge) {
     this.iterativeIncomingEdges.putIfAbsent(edge.getDst(), new HashSet<>());
     this.iterativeIncomingEdges.get(edge.getDst()).add(edge);
   }
+
   /**
    * @return the iterative incoming edges inside the DAG.
    */
@@ -167,6 +174,7 @@
 
   /**
    * Adds a non-iterative incoming edge, from outside the previous iteration.
+   *
    * @param edge edge to add.
    */
   public void addNonIterativeIncomingEdge(final IREdge edge) {
@@ -183,12 +191,14 @@
 
   /**
    * Adds an outgoing edge of the contained DAG.
+   *
    * @param edge edge to add.
    */
   public void addDagOutgoingEdge(final IREdge edge) {
     this.dagOutgoingEdges.putIfAbsent(edge.getSrc(), new HashSet<>());
     this.dagOutgoingEdges.get(edge.getSrc()).add(edge);
   }
+
   /**
    * @return outgoing edges of the contained DAG.
    */
@@ -202,13 +212,14 @@
   public void markDuplicateEdges() {
     nonIterativeIncomingEdges.forEach(((irVertex, irEdges) -> irEdges.forEach(irEdge -> {
       irEdge.setProperty(
-          DuplicateEdgeGroupProperty.of(new DuplicateEdgeGroupPropertyValue(String.valueOf(duplicateEdgeGroupId))));
+        DuplicateEdgeGroupProperty.of(new DuplicateEdgeGroupPropertyValue(String.valueOf(duplicateEdgeGroupId))));
       duplicateEdgeGroupId++;
     })));
   }
 
   /**
    * Method for unrolling an iteration of the LoopVertex.
+   *
    * @param dagBuilder DAGBuilder to add the unrolled iteration to.
    * @return a LoopVertex with one less maximum iteration.
    */
@@ -236,7 +247,7 @@
     // process DAG incoming edges.
     getDagIncomingEdges().forEach((dstVertex, irEdges) -> irEdges.forEach(edge -> {
       final IREdge newIrEdge = new IREdge(edge.getPropertyValue(CommunicationPatternProperty.class).get(),
-          edge.getSrc(), originalToNewIRVertex.get(dstVertex));
+        edge.getSrc(), originalToNewIRVertex.get(dstVertex));
       edge.copyExecutionPropertiesTo(newIrEdge);
       dagBuilder.connectVertices(newIrEdge);
     }));
@@ -245,7 +256,7 @@
       // if termination condition met, we process the DAG outgoing edge.
       getDagOutgoingEdges().forEach((srcVertex, irEdges) -> irEdges.forEach(edge -> {
         final IREdge newIrEdge = new IREdge(edge.getPropertyValue(CommunicationPatternProperty.class).get(),
-            originalToNewIRVertex.get(srcVertex), edge.getDst());
+          originalToNewIRVertex.get(srcVertex), edge.getDst());
         edge.copyExecutionPropertiesTo(newIrEdge);
         dagBuilder.addVertex(edge.getDst()).connectVertices(newIrEdge);
       }));
@@ -256,7 +267,7 @@
     this.nonIterativeIncomingEdges.forEach((dstVertex, irEdges) -> irEdges.forEach(this::addDagIncomingEdge));
     this.iterativeIncomingEdges.forEach((dstVertex, irEdges) -> irEdges.forEach(edge -> {
       final IREdge newIrEdge = new IREdge(edge.getPropertyValue(CommunicationPatternProperty.class).get(),
-          originalToNewIRVertex.get(edge.getSrc()), dstVertex);
+        originalToNewIRVertex.get(edge.getSrc()), dstVertex);
       edge.copyExecutionPropertiesTo(newIrEdge);
       this.addDagIncomingEdge(newIrEdge);
     }));
@@ -270,6 +281,7 @@
   public Boolean loopTerminationConditionMet() {
     return loopTerminationConditionMet(maxNumberOfIterations);
   }
+
   /**
    * @param intPredicateInput input for the intPredicate of the loop termination condition.
    * @return whether or not the loop termination condition has been met.
@@ -280,29 +292,34 @@
 
   /**
    * Set the maximum number of iterations.
+   *
    * @param maxNum maximum number of iterations.
    */
   public void setMaxNumberOfIterations(final Integer maxNum) {
     this.maxNumberOfIterations = maxNum;
   }
+
   /**
    * @return termination condition int predicate.
    */
   public IntPredicate getTerminationCondition() {
     return terminationCondition;
   }
+
   /**
    * @return maximum number of iterations.
    */
   public Integer getMaxNumberOfIterations() {
     return this.maxNumberOfIterations;
   }
+
   /**
    * increase the value of maximum number of iterations by 1.
    */
   public void increaseMaxNumberOfIterations() {
     this.maxNumberOfIterations++;
   }
+
   /**
    * decrease the value of maximum number of iterations by 1.
    */
@@ -312,13 +329,14 @@
 
   /**
    * Check termination condition.
+   *
    * @param that another vertex.
    * @return true if equals.
    */
   public boolean terminationConditionEquals(final LoopVertex that) {
     if (this.maxNumberOfIterations.equals(that.getMaxNumberOfIterations()) && Util
-        .checkEqualityOfIntPredicates(this.terminationCondition, that.getTerminationCondition(),
-            this.maxNumberOfIterations)) {
+      .checkEqualityOfIntPredicates(this.terminationCondition, that.getTerminationCondition(),
+        this.maxNumberOfIterations)) {
       return true;
     }
     return false;
@@ -326,6 +344,7 @@
 
   /**
    * Set the intPredicate termination condition for the LoopVertex.
+   *
    * @param terminationCondition the termination condition to set.
    */
   public void setTerminationCondition(final IntPredicate terminationCondition) {
@@ -347,6 +366,7 @@
 
   /**
    * Convert the crossing edges to JSON.
+   *
    * @param map map of the crossing edges.
    * @return a string of JSON showing the crossing edges.
    */
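A hedged sketch of the LoopVertex termination API shown above; `loopVertex` is an assumed LoopVertex instance, and the IntPredicate input is assumed to be the current iteration count:

```java
loopVertex.setMaxNumberOfIterations(10);
loopVertex.setTerminationCondition(iteration -> iteration >= 10);
if (loopVertex.loopTerminationConditionMet()) {
  // stop unrolling further iterations
}
```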
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/OperatorVertex.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/OperatorVertex.java
index 4be72f0..1b2136d 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/OperatorVertex.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/OperatorVertex.java
@@ -30,6 +30,7 @@
 
   /**
    * Constructor of OperatorVertex.
+   *
    * @param t transform for the OperatorVertex.
    */
   public OperatorVertex(final Transform t) {
@@ -39,6 +40,7 @@
 
   /**
    * Copy Constructor of OperatorVertex.
+   *
    * @param that the source object for copying
    */
   private OperatorVertex(final OperatorVertex that) {
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/SourceVertex.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/SourceVertex.java
index f606cf5..490c240 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/SourceVertex.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/SourceVertex.java
@@ -25,6 +25,7 @@
 /**
  * IRVertex that reads data from an external source.
  * It is to be implemented in the compiler frontend with source-specific data fetching logic.
+ *
  * @param <O> output type.
  */
 public abstract class SourceVertex<O> extends IRVertex {
@@ -60,6 +61,7 @@
 
   /**
    * Gets the estimated size in bytes. Returns 0L if not applicable.
+   *
    * @return size of input bytes.
    */
   public abstract long getEstimatedSizeBytes();
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ClonedSchedulingProperty.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ClonedSchedulingProperty.java
index edce1d7..ee08619 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ClonedSchedulingProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ClonedSchedulingProperty.java
@@ -26,13 +26,14 @@
 
 /**
  * Specifies cloned execution of a vertex.
- *
+ * <p>
  * A major limitation of the current implementation:
  * *ALL* of the clones are always scheduled immediately
  */
 public final class ClonedSchedulingProperty extends VertexExecutionProperty<ClonedSchedulingProperty.CloneConf> {
   /**
    * Constructor.
+   *
    * @param value value of the execution property.
    */
   private ClonedSchedulingProperty(final CloneConf value) {
@@ -41,6 +42,7 @@
 
   /**
    * Static method exposing the constructor.
+   *
    * @param conf value of the new execution property.
    * @return the newly created execution property.
    */
@@ -74,7 +76,8 @@
 
     /**
      * Clone stragglers judiciously.
-     * @param fractionToWaitFor before trying to clone.
+     *
+     * @param fractionToWaitFor    before trying to clone.
      * @param medianTimeMultiplier to identify stragglers.
      */
     public CloneConf(final double fractionToWaitFor, final double medianTimeMultiplier) {
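Using the CloneConf constructor shown above, a hedged sketch of judicious cloning (of() is assumed to follow the static-factory pattern of the other properties in this diff; `vertex` is an assumed IRVertex):

```java
// Wait for 90% of tasks to finish, then clone tasks slower than 1.5x the median time.
final ClonedSchedulingProperty.CloneConf conf = new ClonedSchedulingProperty.CloneConf(0.9, 1.5);
vertex.setProperty(ClonedSchedulingProperty.of(conf));
```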
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/MessageIdVertexProperty.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/MessageIdVertexProperty.java
index 9ba842f..60346f3 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/MessageIdVertexProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/MessageIdVertexProperty.java
@@ -26,6 +26,7 @@
 public final class MessageIdVertexProperty extends VertexExecutionProperty<Integer> {
   /**
    * Constructor.
+   *
    * @param value value of the execution property.
    */
   private MessageIdVertexProperty(final Integer value) {
@@ -34,6 +35,7 @@
 
   /**
    * Static method exposing the constructor.
+   *
    * @param value value of the new execution property.
    * @return the newly created execution property.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ParallelismProperty.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ParallelismProperty.java
index dcfd4b1..992ebe2 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ParallelismProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ParallelismProperty.java
@@ -22,20 +22,20 @@
 
 /**
  * This property decides the number of parallel tasks to use for executing the corresponding IRVertex.
- *
+ * <p>
  * Changing the parallelism requires also changing other execution properties that refer to task offsets.
  * Such execution properties include:
  * {@link ResourceSiteProperty}
  * {@link ResourceAntiAffinityProperty}
  * {@link org.apache.nemo.common.ir.edge.executionproperty.PartitionerProperty}
  * {@link org.apache.nemo.common.ir.edge.executionproperty.PartitionSetProperty}
- *
+ * <p>
  * Moreover, vertices with one-to-one relationships must have the same parallelism.
  * {@link org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty}
- *
+ * <p>
  * Finally, the parallelism cannot be larger than the number of source (e.g., HDFS) input data partitions.
  * {@link org.apache.nemo.common.ir.vertex.SourceVertex}
- *
+ * <p>
  * A violation of any of the above criteria will be caught by Nemo, to ensure correct application semantics.
  */
 public final class ParallelismProperty extends VertexExecutionProperty<Integer> {
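
For readers of the constraints above, a hedged usage sketch follows. It assumes `ParallelismProperty` exposes the same static `of(...)` factory as its sibling properties in this diff; whether a given value is legal depends on the source partition count and one-to-one neighbors, and Nemo's checks reject violations.

```java
import org.apache.nemo.common.ir.vertex.IRVertex;
import org.apache.nemo.common.ir.vertex.executionproperty.ParallelismProperty;

final class ParallelismSketch {
  // Hedged sketch: 8 is only valid if the source has at least 8 input data
  // partitions, and any one-to-one neighbor must carry the same parallelism.
  static void setParallelism(final IRVertex vertex) {
    vertex.setProperty(ParallelismProperty.of(8));
  }
}
```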
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ResourcePriorityProperty.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ResourcePriorityProperty.java
index 798ea54..7b6c6b3 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ResourcePriorityProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ResourcePriorityProperty.java
@@ -26,6 +26,7 @@
 public final class ResourcePriorityProperty extends VertexExecutionProperty<String> {
   /**
    * Constructor.
+   *
    * @param value value of the execution property.
    */
   private ResourcePriorityProperty(final String value) {
@@ -34,6 +35,7 @@
 
   /**
    * Static method exposing the constructor.
+   *
    * @param value value of the new execution property.
    * @return the newly created execution property.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ResourceSiteProperty.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ResourceSiteProperty.java
index 4e44154..ee28e1b 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ResourceSiteProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ResourceSiteProperty.java
@@ -27,20 +27,22 @@
  * TODO #169: Use sites (not node names) in ResourceSiteProperty
  */
 public final class ResourceSiteProperty extends VertexExecutionProperty<HashMap<String, Integer>> {
-    /**
-     * Default constructor.
-     * @param value the map from location to the number of Task that must be executed on the node
-     */
-    public ResourceSiteProperty(final HashMap<String, Integer> value) {
-        super(value);
-    }
+  /**
+   * Default constructor.
+   *
+   * @param value the map from location to the number of Tasks that must be executed on the node
+   */
+  public ResourceSiteProperty(final HashMap<String, Integer> value) {
+    super(value);
+  }
 
-    /**
-     * Static method for constructing {@link ResourceSiteProperty}.
-     * @param value the map from location to the number of Task that must be executed on the node
-     * @return the execution property
-     */
-    public static ResourceSiteProperty of(final HashMap<String, Integer> value) {
-        return new ResourceSiteProperty(value);
-    }
+  /**
+   * Static method for constructing {@link ResourceSiteProperty}.
+   *
+   * @param value the map from location to the number of Tasks that must be executed on the node
+   * @return the execution property
+   */
+  public static ResourceSiteProperty of(final HashMap<String, Integer> value) {
+    return new ResourceSiteProperty(value);
+  }
 }
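
A short, hedged usage sketch of the property above. Only the `of(...)` factory shown in this diff is assumed; the node names are placeholders (per TODO #169 above, sites rather than node names are the eventual goal).

```java
import java.util.HashMap;
import org.apache.nemo.common.ir.vertex.IRVertex;
import org.apache.nemo.common.ir.vertex.executionproperty.ResourceSiteProperty;

final class ResourceSiteSketch {
  // Hedged sketch: run three of the vertex's tasks on "node-a" and one on
  // "node-b". The node names are illustrative placeholders.
  static void pinTasks(final IRVertex vertex) {
    final HashMap<String, Integer> siteToNumTasks = new HashMap<>();
    siteToNumTasks.put("node-a", 3);
    siteToNumTasks.put("node-b", 1);
    vertex.setProperty(ResourceSiteProperty.of(siteToNumTasks));
  }
}
```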
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ResourceSlotProperty.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ResourceSlotProperty.java
index 1b3c161..7475e21 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ResourceSlotProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ResourceSlotProperty.java
@@ -26,7 +26,7 @@
 public final class ResourceSlotProperty extends VertexExecutionProperty<Boolean> {
   private static final ResourceSlotProperty COMPLIANCE_TRUE = new ResourceSlotProperty(true);
   private static final ResourceSlotProperty COMPLIANCE_FALSE
-      = new ResourceSlotProperty(false);
+    = new ResourceSlotProperty(false);
 
   /**
    * Default constructor.
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ScheduleGroupProperty.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ScheduleGroupProperty.java
index f6df810..97e9d6a 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ScheduleGroupProperty.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/executionproperty/ScheduleGroupProperty.java
@@ -26,6 +26,7 @@
 public final class ScheduleGroupProperty extends VertexExecutionProperty<Integer> {
   /**
    * Constructor.
+   *
    * @param value value of the execution property.
    */
   private ScheduleGroupProperty(final Integer value) {
@@ -34,6 +35,7 @@
 
   /**
    * Static method exposing the constructor.
+   *
    * @param value value of the new execution property.
    * @return the newly created execution property.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/MessageAggregatorTransform.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/MessageAggregatorTransform.java
index d4dd77b..e33900f 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/MessageAggregatorTransform.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/MessageAggregatorTransform.java
@@ -40,6 +40,7 @@
 
   /**
    * Default constructor.
+   *
    * @param aggregatedDynOptData per-stage aggregated dynamic optimization data.
    * @param dynOptDataAggregator aggregator to use.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/MessageBarrierTransform.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/MessageBarrierTransform.java
index 4c9a007..fe2e523 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/MessageBarrierTransform.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/MessageBarrierTransform.java
@@ -29,6 +29,7 @@
 
 /**
  * A {@link Transform} that collects task-level statistics used for dynamic optimization.
+ *
  * @param <I> input type.
  * @param <K> output key type.
  * @param <V> output value type.
@@ -42,6 +43,7 @@
 
   /**
    * MessageBarrierTransform constructor.
+   *
    * @param userFunction that analyzes the data.
    */
   public MessageBarrierTransform(final BiFunction<I, Map<K, V>, Map<K, V>> userFunction) {
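
The constructor above takes a user function over per-task statistics. Below is a hedged sketch of such a function that counts occurrences per element; the generic instantiation and the counting logic are illustrative, and only the `BiFunction<I, Map<K, V>, Map<K, V>>` constructor shown in this diff is assumed.

```java
import java.util.Map;
import java.util.function.BiFunction;
import org.apache.nemo.common.ir.vertex.transform.MessageBarrierTransform;

final class MessageBarrierSketch {
  // Hedged sketch: a user function that folds each input element into a
  // per-key count, which the barrier then reports as task-level statistics.
  static MessageBarrierTransform<String, String, Long> countingBarrier() {
    final BiFunction<String, Map<String, Long>, Map<String, Long>> countPerKey =
      (element, stats) -> {
        stats.merge(element, 1L, Long::sum);
        return stats;
      };
    return new MessageBarrierTransform<>(countPerKey);
  }
}
```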
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/NoWatermarkEmitTransform.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/NoWatermarkEmitTransform.java
index b9fab8d..549a3f1 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/NoWatermarkEmitTransform.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/NoWatermarkEmitTransform.java
@@ -23,6 +23,7 @@
 /**
  * This transform does not emit watermarks.
  * It may be a transform for batch operations that emits collected data when calling {@link Transform#close()}.
+ *
  * @param <I> input type
  * @param <O> output type
  */
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/StreamTransform.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/StreamTransform.java
index 97a7013..9781a8d 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/StreamTransform.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/StreamTransform.java
@@ -26,6 +26,7 @@
 /**
 * A {@link Transform} that relays input data from an upstream vertex to a downstream vertex promptly.
  * This transform can be used for merging input data into the {@link OutputCollector}.
+ *
  * @param <T> input/output type.
  */
 public final class StreamTransform<T> implements Transform<T, T> {
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/Transform.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/Transform.java
index a79c192..a4787e1 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/Transform.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/transform/Transform.java
@@ -28,19 +28,22 @@
  * Interface for specifying 'What' to do with data.
  * It is to be implemented in the compiler frontend, possibly for every operator in a dataflow language.
  * 'How' and 'When' to do with its input/output data are up to the runtime.
+ *
  * @param <I> input type.
  * @param <O> output type.
  */
 public interface Transform<I, O> extends Serializable {
   /**
    * Prepare the transform.
-   * @param context of the transform.
+   *
+   * @param context         of the transform.
    * @param outputCollector that collects outputs.
    */
   void prepare(Context context, OutputCollector<O> outputCollector);
 
   /**
    * On data received.
+   *
    * @param element data received.
    */
   void onData(I element);
@@ -49,6 +52,7 @@
    * On watermark received.
    * This method should be called for the minimum watermark among input streams (input watermark).
    * Transform may emit collected data after receiving watermarks.
+   *
    * @param watermark watermark
    */
   void onWatermark(Watermark watermark);
@@ -70,12 +74,14 @@
 
     /**
      * Put serialized data to send to the executor.
+     *
      * @param serializedData the serialized data.
      */
     void setSerializedData(String serializedData);
 
     /**
      * Retrieve the serialized data on the executor.
+     *
      * @return the serialized data.
      */
     Optional<String> getSerializedData();
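
To make the lifecycle above concrete, here is a hedged sketch of a trivial `Transform` implementation: `prepare()` stores the collector, `onData()` transforms and emits each element, `onWatermark()` forwards the (minimum) input watermark, and `close()` flushes any remaining state. The import paths for `OutputCollector` and `Watermark`, and the `emitWatermark` call, are assumptions based on the surrounding codebase rather than this diff.

```java
import org.apache.nemo.common.ir.OutputCollector;
import org.apache.nemo.common.ir.vertex.transform.Transform;
import org.apache.nemo.common.punctuation.Watermark;

public final class UpperCaseTransform implements Transform<String, String> {
  private OutputCollector<String> outputCollector;

  @Override
  public void prepare(final Context context, final OutputCollector<String> outputCollector) {
    this.outputCollector = outputCollector;
  }

  @Override
  public void onData(final String element) {
    // Transform and relay each element as it arrives.
    outputCollector.emit(element.toUpperCase());
  }

  @Override
  public void onWatermark(final Watermark watermark) {
    // Called with the minimum watermark among input streams; forward it.
    outputCollector.emitWatermark(watermark);
  }

  @Override
  public void close() {
    // Stateless transform: nothing to flush.
  }
}
```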
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/utility/MessageAggregatorVertex.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/utility/MessageAggregatorVertex.java
index 076ba1b..2e2cdb6 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/utility/MessageAggregatorVertex.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/utility/MessageAggregatorVertex.java
@@ -31,6 +31,7 @@
 
 /**
  * Aggregates upstream messages.
+ *
  * @param <K> of the input pair.
  * @param <V> of the input pair.
  * @param <O> of the output aggregated message.
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/utility/MessageBarrierVertex.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/utility/MessageBarrierVertex.java
index 66ed760..2647794 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/utility/MessageBarrierVertex.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/utility/MessageBarrierVertex.java
@@ -26,6 +26,7 @@
 
 /**
  * Generates messages.
+ *
  * @param <I> input type
  * @param <K> of the output pair.
  * @param <V> of the output pair.
diff --git a/common/src/main/java/org/apache/nemo/common/ir/vertex/utility/SamplingVertex.java b/common/src/main/java/org/apache/nemo/common/ir/vertex/utility/SamplingVertex.java
index 65ccfee..b614022 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/vertex/utility/SamplingVertex.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/vertex/utility/SamplingVertex.java
@@ -32,7 +32,7 @@
   private final float desiredSampleRate;
 
   /**
-   * @param originalVertex to clone.
+   * @param originalVertex    to clone.
    * @param desiredSampleRate percentage of tasks to execute.
    *                          The actual sample rate may vary depending on neighboring sampling vertices.
    */
@@ -80,7 +80,7 @@
 
   /**
    * Obtains a clone of an original edge that is attached to this sampling vertex.
-   *
+   * <p>
    * Original edge: src - to - dst
    * When src == originalVertex, return thisSamplingVertex - to - dst
    * When dst == originalVertex, return src - to - thisSamplingVertex
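
The edge-cloning rule above can be sketched as a single conditional. This is an illustration of the documented rule, not the actual implementation; the helper name and the way the communication pattern is copied via `getPropertyValue` are assumptions.

```java
import org.apache.nemo.common.ir.edge.IREdge;
import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
import org.apache.nemo.common.ir.vertex.IRVertex;

final class SamplingEdgeSketch {
  // Hedged sketch: re-attach whichever endpoint of the original edge equals
  // the original vertex to the sampling vertex instead.
  static IREdge cloneEdge(final IREdge originalEdge,
                          final IRVertex originalVertex,
                          final IRVertex samplingVertex) {
    final CommunicationPatternProperty.Value pattern =
      originalEdge.getPropertyValue(CommunicationPatternProperty.class)
        .orElseThrow(IllegalStateException::new);
    if (originalEdge.getSrc().equals(originalVertex)) {
      // Original edge: originalVertex - to - dst  =>  samplingVertex - to - dst
      return new IREdge(pattern, samplingVertex, originalEdge.getDst());
    } else {
      // Original edge: src - to - originalVertex  =>  src - to - samplingVertex
      return new IREdge(pattern, originalEdge.getSrc(), samplingVertex);
    }
  }
}
```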
diff --git a/common/src/main/java/org/apache/nemo/common/partitioner/HashPartitioner.java b/common/src/main/java/org/apache/nemo/common/partitioner/HashPartitioner.java
index 655298e..241f056 100644
--- a/common/src/main/java/org/apache/nemo/common/partitioner/HashPartitioner.java
+++ b/common/src/main/java/org/apache/nemo/common/partitioner/HashPartitioner.java
@@ -33,7 +33,7 @@
    * Constructor.
    *
    * @param numOfPartitions the number of partitions.
-   * @param keyExtractor   the key extractor that extracts keys from elements.
+   * @param keyExtractor    the key extractor that extracts keys from elements.
    */
   public HashPartitioner(final int numOfPartitions,
                          final KeyExtractor keyExtractor) {
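
For context, the conventional rule a hash partitioner of this shape applies is sketched below; the actual implementation is not part of this diff, and `extractKey` is assumed from the `KeyExtractor` interface.

```java
import org.apache.nemo.common.KeyExtractor;

final class HashPartitionSketch {
  // Hedged sketch: map an element to one of numOfPartitions buckets by the
  // hash of its extracted key. Math.abs guards against negative hash codes
  // (Integer.MIN_VALUE aside, which a real implementation must handle).
  static int partitionFor(final Object element,
                          final KeyExtractor keyExtractor,
                          final int numOfPartitions) {
    return Math.abs(keyExtractor.extractKey(element).hashCode() % numOfPartitions);
  }
}
```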
diff --git a/common/src/main/java/org/apache/nemo/common/partitioner/Partitioner.java b/common/src/main/java/org/apache/nemo/common/partitioner/Partitioner.java
index f389032..dabaf0a 100644
--- a/common/src/main/java/org/apache/nemo/common/partitioner/Partitioner.java
+++ b/common/src/main/java/org/apache/nemo/common/partitioner/Partitioner.java
@@ -48,7 +48,7 @@
 
   /**
    * @param edgeProperties edge properties.
-   * @param dstProperties vertex properties.
+   * @param dstProperties  vertex properties.
    * @return the partitioner.
    */
   static Partitioner getPartitioner(final ExecutionPropertyMap<EdgeExecutionProperty> edgeProperties,
diff --git a/common/src/main/java/org/apache/nemo/common/pass/Pass.java b/common/src/main/java/org/apache/nemo/common/pass/Pass.java
index 7a540c9..3000fdb 100644
--- a/common/src/main/java/org/apache/nemo/common/pass/Pass.java
+++ b/common/src/main/java/org/apache/nemo/common/pass/Pass.java
@@ -38,6 +38,7 @@
 
   /**
    * Constructor.
+   *
    * @param condition condition under which to run the pass.
    */
   private Pass(final Predicate<IRDAG> condition) {
@@ -46,6 +47,7 @@
 
   /**
    * Getter for the condition under which to apply the pass.
+   *
    * @return the condition under which to apply the pass.
    */
   public final Predicate<IRDAG> getCondition() {
@@ -54,6 +56,7 @@
 
   /**
    * Add the condition to the existing condition to run the pass.
+   *
    * @param newCondition the new condition to add to the existing condition.
    * @return the condition with the new condition added.
    */
diff --git a/common/src/main/java/org/apache/nemo/common/test/EmptyComponents.java b/common/src/main/java/org/apache/nemo/common/test/EmptyComponents.java
index 45af7cf..fbd53c2 100644
--- a/common/src/main/java/org/apache/nemo/common/test/EmptyComponents.java
+++ b/common/src/main/java/org/apache/nemo/common/test/EmptyComponents.java
@@ -19,6 +19,7 @@
 package org.apache.nemo.common.test;
 
 import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.beam.sdk.values.KV;
 import org.apache.nemo.common.KeyExtractor;
 import org.apache.nemo.common.coder.DecoderFactory;
 import org.apache.nemo.common.coder.EncoderFactory;
@@ -33,7 +34,6 @@
 import org.apache.nemo.common.ir.vertex.SourceVertex;
 import org.apache.nemo.common.ir.vertex.transform.NoWatermarkEmitTransform;
 import org.apache.nemo.common.ir.vertex.transform.Transform;
-import org.apache.beam.sdk.values.KV;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -65,6 +65,7 @@
 
   /**
    * Builds dummy IR DAG for testing.
+   *
    * @return the dummy IR DAG.
    */
   public static IRDAG buildEmptyDAG() {
@@ -93,6 +94,7 @@
    * Builds dummy IR DAG to test skew handling.
   * For DataSkewPolicy, shuffle edges need extra settings for EncoderProperty, DecoderProperty
    * and KeyExtractorProperty by default.
+   *
    * @return the dummy IR DAG.
    */
   public static IRDAG buildEmptyDAGForSkew() {
@@ -198,7 +200,7 @@
     /**
      * Constructor.
      *
-     * @param name name for the vertex.
+     * @param name            name for the vertex.
      * @param minNumReadables for the vertex.
      */
     public EmptySourceVertex(final String name, final int minNumReadables) {
diff --git a/common/src/main/java/org/apache/nemo/common/test/ExampleTestUtil.java b/common/src/main/java/org/apache/nemo/common/test/ExampleTestUtil.java
index 42fc486..628a12f 100644
--- a/common/src/main/java/org/apache/nemo/common/test/ExampleTestUtil.java
+++ b/common/src/main/java/org/apache/nemo/common/test/ExampleTestUtil.java
@@ -33,6 +33,7 @@
  */
 public final class ExampleTestUtil {
   private static final Double ERROR = 1e-8;
+
   /**
    * Private constructor.
    */
@@ -42,8 +43,8 @@
   /**
    * Ensures output correctness with the given test resource file.
    *
-   * @param resourcePath root folder for both resources.
-   * @param outputFileName output file name.
+   * @param resourcePath         root folder for both resources.
+   * @param outputFileName       output file name.
    * @param testResourceFileName the test result file name.
    * @throws IOException if the output is invalid.
    */
@@ -54,38 +55,38 @@
     final String testOutput;
     try (final Stream<Path> fileStream = Files.list(Paths.get(resourcePath))) {
       testOutput = fileStream
-          .filter(Files::isRegularFile)
-           // TODO 346: Do not use test file prefixes
-           // i.e., replace startsWith() with something like regex matching
-          .filter(path -> path.getFileName().toString().startsWith(outputFileName))
-          .flatMap(path -> {
-            try {
-              return Files.lines(path);
-            } catch (final IOException e) {
-              throw new RuntimeException(e);
-            }
-          })
-          .sorted()
-          .reduce("", (p, q) -> (p + "\n" + q));
+        .filter(Files::isRegularFile)
+        // TODO 346: Do not use test file prefixes
+        // i.e., replace startsWith() with something like regex matching
+        .filter(path -> path.getFileName().toString().startsWith(outputFileName))
+        .flatMap(path -> {
+          try {
+            return Files.lines(path);
+          } catch (final IOException e) {
+            throw new RuntimeException(e);
+          }
+        })
+        .sorted()
+        .reduce("", (p, q) -> (p + "\n" + q));
     }
 
     final String resourceOutput;
 
     try (final Stream<String> lineStream = Files.lines(Paths.get(resourcePath + testResourceFileName))) {
       resourceOutput = lineStream
-          .sorted()
-          .reduce("", (p, q) -> (p + "\n" + q));
+        .sorted()
+        .reduce("", (p, q) -> (p + "\n" + q));
     }
 
     if (!testOutput.equals(resourceOutput)) {
       final String outputMsg =
-          "Test output mismatch while comparing [" + outputFileName + "] from [" + testResourceFileName + "] under "
-              + resourcePath + ":\n"
-              + "=============" + outputFileName + "=================="
-              + testOutput
-              + "\n=============" + testResourceFileName + "=================="
-              + resourceOutput
-              + "\n===============================";
+        "Test output mismatch while comparing [" + outputFileName + "] from [" + testResourceFileName + "] under "
+          + resourcePath + ":\n"
+          + "=============" + outputFileName + "=================="
+          + testOutput
+          + "\n=============" + testResourceFileName + "=================="
+          + resourceOutput
+          + "\n===============================";
       throw new RuntimeException(outputMsg);
     }
   }
@@ -95,11 +96,11 @@
   * Due to floating-point math error, the output of the test can differ from run to run.
    * Thus we cannot compare plain text output, but have to check its numeric error.
    *
-   * @param resourcePath path to resources.
-   * @param outputFileName name of output file.
+   * @param resourcePath         path to resources.
+   * @param outputFileName       name of output file.
    * @param testResourceFileName name of the file to compare the outputs to.
    * @throws RuntimeException if the output is invalid.
-   * @throws IOException exception.
+   * @throws IOException      exception.
    */
   public static void ensureALSOutputValidity(final String resourcePath,
                                              final String outputFileName,
@@ -108,30 +109,30 @@
     final List<List<Double>> testOutput;
     try (final Stream<Path> fileStream = Files.list(Paths.get(resourcePath))) {
       testOutput = fileStream
-          .filter(Files::isRegularFile)
-          .filter(path -> path.getFileName().toString().startsWith(outputFileName))
-          .flatMap(path -> {
-            try {
-              return Files.lines(path);
-            } catch (final IOException e) {
-              throw new RuntimeException(e);
-            }
-          })
-          .sorted()
-          .filter(line -> !line.trim().equals(""))
-          .map(line -> Arrays.asList(line.split("\\s*,\\s*"))
-              .stream().map(s -> Double.valueOf(s)).collect(Collectors.toList()))
-          .collect(Collectors.toList());
+        .filter(Files::isRegularFile)
+        .filter(path -> path.getFileName().toString().startsWith(outputFileName))
+        .flatMap(path -> {
+          try {
+            return Files.lines(path);
+          } catch (final IOException e) {
+            throw new RuntimeException(e);
+          }
+        })
+        .sorted()
+        .filter(line -> !line.trim().equals(""))
+        .map(line -> Arrays.asList(line.split("\\s*,\\s*"))
+          .stream().map(s -> Double.valueOf(s)).collect(Collectors.toList()))
+        .collect(Collectors.toList());
     }
 
     final List<List<Double>> resourceOutput;
     try (final Stream<String> lineStream = Files.lines(Paths.get(resourcePath + testResourceFileName))) {
       resourceOutput = lineStream
-          .sorted()
-          .filter(line -> !line.trim().equals(""))
-          .map(line -> Arrays.asList(line.split("\\s*,\\s*"))
-              .stream().map(s -> Double.valueOf(s)).collect(Collectors.toList()))
-          .collect(Collectors.toList());
+        .sorted()
+        .filter(line -> !line.trim().equals(""))
+        .map(line -> Arrays.asList(line.split("\\s*,\\s*"))
+          .stream().map(s -> Double.valueOf(s)).collect(Collectors.toList()))
+        .collect(Collectors.toList());
     }
 
     if (testOutput.size() != resourceOutput.size()) {
@@ -160,9 +161,9 @@
                                       final String outputFileName) throws IOException {
     try (final Stream<Path> fileStream = Files.list(Paths.get(directory))) {
       final Set<Path> outputFilePaths = fileStream
-          .filter(Files::isRegularFile)
-          .filter(path -> path.getFileName().toString().startsWith(outputFileName))
-          .collect(Collectors.toSet());
+        .filter(Files::isRegularFile)
+        .filter(path -> path.getFileName().toString().startsWith(outputFileName))
+        .collect(Collectors.toSet());
       for (final Path outputFilePath : outputFilePaths) {
         Files.delete(outputFilePath);
       }
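
The ALS validity check above compares numbers rather than text. A hedged sketch of the per-value comparison it implies, using the ERROR tolerance declared earlier in this file (the real method additionally checks that the row and column counts match):

```java
final class NumericCompareSketch {
  private static final double ERROR = 1e-8;

  // Hedged sketch: two floating-point outputs are considered equal when
  // their absolute difference stays within the declared tolerance.
  static boolean nearlyEqual(final double expected, final double actual) {
    return Math.abs(expected - actual) <= ERROR;
  }
}
```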
diff --git a/common/src/test/java/org/apache/nemo/common/DAGTest.java b/common/src/test/java/org/apache/nemo/common/DAGTest.java
index d1841f8..e735fef 100644
--- a/common/src/test/java/org/apache/nemo/common/DAGTest.java
+++ b/common/src/test/java/org/apache/nemo/common/DAGTest.java
@@ -19,8 +19,8 @@
 package org.apache.nemo.common;
 
 import org.apache.nemo.common.dag.DAG;
-import org.apache.nemo.common.dag.Edge;
 import org.apache.nemo.common.dag.DAGBuilder;
+import org.apache.nemo.common.dag.Edge;
 import org.apache.nemo.common.dag.Vertex;
 import org.junit.Before;
 import org.junit.Test;
@@ -144,6 +144,7 @@
 
   final class IntegerVertex extends Vertex {
     private final int value;
+
     public IntegerVertex(final int value) {
       super(String.valueOf(value));
       this.value = value;
@@ -158,7 +159,7 @@
       if (this == o) {
         return true;
       }
-      if (o == null || getClass() != o.getClass()){
+      if (o == null || getClass() != o.getClass()) {
         return false;
       }
       final IntegerVertex that = (IntegerVertex) o;
diff --git a/common/src/test/java/org/apache/nemo/common/PairTest.java b/common/src/test/java/org/apache/nemo/common/PairTest.java
index 8792131..8b59e12 100644
--- a/common/src/test/java/org/apache/nemo/common/PairTest.java
+++ b/common/src/test/java/org/apache/nemo/common/PairTest.java
@@ -18,7 +18,6 @@
  */
 package org.apache.nemo.common;
 
-import org.apache.nemo.common.Pair;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.test.EmptyComponents;
 import org.junit.Test;
diff --git a/common/src/test/java/org/apache/nemo/common/StateMachineTest.java b/common/src/test/java/org/apache/nemo/common/StateMachineTest.java
index 4d1f63f..41f9cd2 100644
--- a/common/src/test/java/org/apache/nemo/common/StateMachineTest.java
+++ b/common/src/test/java/org/apache/nemo/common/StateMachineTest.java
@@ -18,7 +18,6 @@
  */
 package org.apache.nemo.common;
 
-import org.apache.nemo.common.StateMachine;
 import org.apache.nemo.common.exception.IllegalStateTransitionException;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/common/src/test/java/org/apache/nemo/common/coder/CoderFactoryTest.java b/common/src/test/java/org/apache/nemo/common/coder/CoderFactoryTest.java
index 1b3302e..acd4345 100644
--- a/common/src/test/java/org/apache/nemo/common/coder/CoderFactoryTest.java
+++ b/common/src/test/java/org/apache/nemo/common/coder/CoderFactoryTest.java
@@ -55,8 +55,8 @@
    * @return the decoded element.
    */
   private <T> T encodeAndDecodeElement(final EncoderFactory<T> encoderFactory,
-                                                     final DecoderFactory<T> decoderFactory,
-                                                     final T element) throws Exception {
+                                       final DecoderFactory<T> decoderFactory,
+                                       final T element) throws Exception {
     final byte[] encodedElement;
     try (final ByteArrayOutputStream out = new ByteArrayOutputStream()) {
       final EncoderFactory.Encoder<T> encoder = encoderFactory.create(out);
diff --git a/common/src/test/java/org/apache/nemo/common/ir/IRDAGTest.java b/common/src/test/java/org/apache/nemo/common/ir/IRDAGTest.java
index 1025c7c..9444c33 100644
--- a/common/src/test/java/org/apache/nemo/common/ir/IRDAGTest.java
+++ b/common/src/test/java/org/apache/nemo/common/ir/IRDAGTest.java
@@ -280,7 +280,7 @@
     irdag.insert(Sets.newHashSet(svOne), Sets.newHashSet(sourceVertex));
     mustPass();
 
-    final SamplingVertex svTwo = new SamplingVertex(firstOperatorVertex, 0.1f);;
+    final SamplingVertex svTwo = new SamplingVertex(firstOperatorVertex, 0.1f);
     irdag.insert(Sets.newHashSet(svTwo), Sets.newHashSet(firstOperatorVertex));
     mustPass();
 
@@ -319,13 +320,27 @@
       switch (methodIndex) {
         // Annotation methods
         // For simplicity, we test only the EPs for which all possible values are valid.
-        case 0: selectRandomVertex().setProperty(randomCSP()); break;
-        case 1: selectRandomVertex().setProperty(randomRLP()); break;
-        case 2: selectRandomVertex().setProperty(randomRPP()); break;
-        case 3: selectRandomVertex().setProperty(randomRSP()); break;
-        case 4: selectRandomEdge().setProperty(randomDFP()); break;
-        case 5: selectRandomEdge().setProperty(randomDPP()); break;
-        case 6: selectRandomEdge().setProperty(randomDSP()); break;
+        case 0:
+          selectRandomVertex().setProperty(randomCSP());
+          break;
+        case 1:
+          selectRandomVertex().setProperty(randomRLP());
+          break;
+        case 2:
+          selectRandomVertex().setProperty(randomRPP());
+          break;
+        case 3:
+          selectRandomVertex().setProperty(randomRSP());
+          break;
+        case 4:
+          selectRandomEdge().setProperty(randomDFP());
+          break;
+        case 5:
+          selectRandomEdge().setProperty(randomDPP());
+          break;
+        case 6:
+          selectRandomEdge().setProperty(randomDSP());
+          break;
 
         // Reshaping methods
         case 7:
@@ -434,11 +449,16 @@
 
   private DataStoreProperty randomDSP() {
     switch (random.nextInt(4)) {
-      case 0: return DataStoreProperty.of(DataStoreProperty.Value.MemoryStore);
-      case 1: return DataStoreProperty.of(DataStoreProperty.Value.SerializedMemoryStore);
-      case 2: return DataStoreProperty.of(DataStoreProperty.Value.LocalFileStore);
-      case 3: return DataStoreProperty.of(DataStoreProperty.Value.GlusterFileStore);
-      default: throw new IllegalStateException();
+      case 0:
+        return DataStoreProperty.of(DataStoreProperty.Value.MemoryStore);
+      case 1:
+        return DataStoreProperty.of(DataStoreProperty.Value.SerializedMemoryStore);
+      case 2:
+        return DataStoreProperty.of(DataStoreProperty.Value.LocalFileStore);
+      case 3:
+        return DataStoreProperty.of(DataStoreProperty.Value.GlusterFileStore);
+      default:
+        throw new IllegalStateException();
     }
   }
 }
diff --git a/common/src/test/java/org/apache/nemo/common/ir/LoopVertexTest.java b/common/src/test/java/org/apache/nemo/common/ir/LoopVertexTest.java
index da500b1..9bf339b 100644
--- a/common/src/test/java/org/apache/nemo/common/ir/LoopVertexTest.java
+++ b/common/src/test/java/org/apache/nemo/common/ir/LoopVertexTest.java
@@ -18,13 +18,13 @@
  */
 package org.apache.nemo.common.ir;
 
+import org.apache.nemo.common.Pair;
+import org.apache.nemo.common.dag.DAGBuilder;
 import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.vertex.LoopVertex;
 import org.apache.nemo.common.ir.vertex.OperatorVertex;
-import org.apache.nemo.common.Pair;
-import org.apache.nemo.common.dag.DAGBuilder;
 import org.apache.nemo.common.test.EmptyComponents;
 import org.junit.Before;
 import org.junit.Test;
@@ -55,18 +55,18 @@
     final DAGBuilder<IRVertex, IREdge> builder = new DAGBuilder<>();
 
     loopDAGBuilder.addVertex(map1).addVertex(groupByKey).addVertex(combine).addVertex(map2)
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.Shuffle, map1, groupByKey))
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, groupByKey, combine))
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, combine, map2));
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.Shuffle, map1, groupByKey))
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, groupByKey, combine))
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, combine, map2));
     loopVertex.addDagIncomingEdge(new IREdge(CommunicationPatternProperty.Value.OneToOne, source, map1));
     loopVertex.addIterativeIncomingEdge(new IREdge(CommunicationPatternProperty.Value.OneToOne, map2, map1));
 
     originalDAG = new IRDAG(builder.addVertex(source).addVertex(map1).addVertex(groupByKey).addVertex(combine).addVertex(map2)
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, source, map1))
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.Shuffle, map1, groupByKey))
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, groupByKey, combine))
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, combine, map2))
-        .build());
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, source, map1))
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.Shuffle, map1, groupByKey))
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, groupByKey, combine))
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, combine, map2))
+      .build());
   }
 
   @Test
@@ -77,13 +77,13 @@
     vertices1.next();
     final Iterator<IRVertex> vertices2 = loopVertex.getDAG().getTopologicalSort().iterator();
     final List<Pair<IRVertex, IRVertex>> list = new ArrayList<>();
-    while  (vertices1.hasNext() && vertices2.hasNext()) {
+    while (vertices1.hasNext() && vertices2.hasNext()) {
       list.add(Pair.of(vertices1.next(), vertices2.next()));
     }
     list.forEach(irVertexPair -> {
-        assertEquals(irVertexPair.left().getExecutionProperties(), irVertexPair.right().getExecutionProperties());
-        assertEquals(originalDAG.getOutgoingEdgesOf(irVertexPair.left()).size(),
-            loopVertex.getDAG().getOutgoingEdgesOf(irVertexPair.right()).size());
+      assertEquals(irVertexPair.left().getExecutionProperties(), irVertexPair.right().getExecutionProperties());
+      assertEquals(originalDAG.getOutgoingEdgesOf(irVertexPair.left()).size(),
+        loopVertex.getDAG().getOutgoingEdgesOf(irVertexPair.right()).size());
     });
 
     assertEquals(source, loopVertex.getDagIncomingEdges().values().iterator().next().iterator().next().getSrc());
diff --git a/common/src/test/java/org/apache/nemo/common/ir/executionproperty/ExecutionPropertyMapTest.java b/common/src/test/java/org/apache/nemo/common/ir/executionproperty/ExecutionPropertyMapTest.java
index 69af0d2..06e6d58 100644
--- a/common/src/test/java/org/apache/nemo/common/ir/executionproperty/ExecutionPropertyMapTest.java
+++ b/common/src/test/java/org/apache/nemo/common/ir/executionproperty/ExecutionPropertyMapTest.java
@@ -32,9 +32,7 @@
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
 
 /**
  * Test {@link ExecutionPropertyMap}.
diff --git a/common/src/test/java/org/apache/nemo/common/util/UtilTest.java b/common/src/test/java/org/apache/nemo/common/util/UtilTest.java
index e46db33..4e3d02d 100644
--- a/common/src/test/java/org/apache/nemo/common/util/UtilTest.java
+++ b/common/src/test/java/org/apache/nemo/common/util/UtilTest.java
@@ -18,25 +18,25 @@
  */
 package org.apache.nemo.common.util;
 
-import static org.junit.Assert.assertEquals;
-
-import java.util.function.IntPredicate;
-
 import org.apache.nemo.common.Util;
 import org.junit.Test;
 
+import java.util.function.IntPredicate;
+
+import static org.junit.Assert.assertEquals;
+
 public class UtilTest {
 
-    @Test
-    public void testCheckEqualityOfIntPredicates() {
+  @Test
+  public void testCheckEqualityOfIntPredicates() {
 
-        IntPredicate firstPredicate = number -> number < 5;
-        IntPredicate secondPredicate = number -> number < 10;
-        assertEquals(true,
-                Util.checkEqualityOfIntPredicates(firstPredicate, secondPredicate, 4));
-        assertEquals(false,
-                Util.checkEqualityOfIntPredicates(firstPredicate, secondPredicate, 5));
-        assertEquals(false,
-                Util.checkEqualityOfIntPredicates(firstPredicate, secondPredicate, 7));
-    }
+    IntPredicate firstPredicate = number -> number < 5;
+    IntPredicate secondPredicate = number -> number < 10;
+    assertEquals(true,
+      Util.checkEqualityOfIntPredicates(firstPredicate, secondPredicate, 4));
+    assertEquals(false,
+      Util.checkEqualityOfIntPredicates(firstPredicate, secondPredicate, 5));
+    assertEquals(false,
+      Util.checkEqualityOfIntPredicates(firstPredicate, secondPredicate, 7));
+  }
 }
diff --git a/compiler/backend/pom.xml b/compiler/backend/pom.xml
index 237ce32..625a313 100644
--- a/compiler/backend/pom.xml
+++ b/compiler/backend/pom.xml
@@ -17,34 +17,35 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-compiler</artifactId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../</relativePath>
-    </parent>
+  <parent>
+    <groupId>org.apache.nemo</groupId>
+    <artifactId>nemo-compiler</artifactId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../</relativePath>
+  </parent>
 
-    <artifactId>nemo-compiler-backend</artifactId>
-    <name>Nemo Compiler Backend</name>
+  <artifactId>nemo-compiler-backend</artifactId>
+  <name>Nemo Compiler Backend</name>
 
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-	    <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-runtime-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-compiler-optimizer</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-    </dependencies>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-runtime-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-compiler-optimizer</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
 </project>
diff --git a/compiler/backend/src/main/java/org/apache/nemo/compiler/backend/Backend.java b/compiler/backend/src/main/java/org/apache/nemo/compiler/backend/Backend.java
index 2a4fc20..73f1eb9 100644
--- a/compiler/backend/src/main/java/org/apache/nemo/compiler/backend/Backend.java
+++ b/compiler/backend/src/main/java/org/apache/nemo/compiler/backend/Backend.java
@@ -24,6 +24,7 @@
 
 /**
  * Interface for backend components.
+ *
  * @param <Plan> the physical execution plan to compile the DAG into.
  */
 @DefaultImplementation(NemoBackend.class)
diff --git a/compiler/backend/src/main/java/org/apache/nemo/compiler/backend/nemo/NemoBackend.java b/compiler/backend/src/main/java/org/apache/nemo/compiler/backend/nemo/NemoBackend.java
index 8d6e6d9..c9921c7 100644
--- a/compiler/backend/src/main/java/org/apache/nemo/compiler/backend/nemo/NemoBackend.java
+++ b/compiler/backend/src/main/java/org/apache/nemo/compiler/backend/nemo/NemoBackend.java
@@ -38,6 +38,7 @@
 
   /**
    * Constructor.
+   *
    * @param physicalPlanGenerator physical plan generator.
    */
   @Inject
diff --git a/compiler/backend/src/main/java/org/apache/nemo/compiler/backend/nemo/NemoPlanRewriter.java b/compiler/backend/src/main/java/org/apache/nemo/compiler/backend/nemo/NemoPlanRewriter.java
index 03373d5..550699a 100644
--- a/compiler/backend/src/main/java/org/apache/nemo/compiler/backend/nemo/NemoPlanRewriter.java
+++ b/compiler/backend/src/main/java/org/apache/nemo/compiler/backend/nemo/NemoPlanRewriter.java
@@ -34,19 +34,22 @@
 import org.slf4j.LoggerFactory;
 
 import javax.inject.Inject;
-import java.util.*;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 import java.util.stream.Collectors;
 
 /**
  * Rewrites the physical plan during execution, to enforce the optimizations of Nemo RunTimePasses.
- *
+ * <p>
  * A high-level flow of a rewrite is as follows:
  * Runtime - (PhysicalPlan-level info) - NemoPlanRewriter - (IRDAG-level info) - NemoOptimizer - (new IRDAG)
  * - NemoPlanRewriter - (new PhysicalPlan) - Runtime
- *
+ * <p>
  * Here, the NemoPlanRewriter acts as a translator between the Runtime that only understands PhysicalPlan-level info,
  * and the NemoOptimizer that only understands IRDAG-level info.
- *
+ * <p>
  * This decoupling between the NemoOptimizer and the Runtime lets Nemo optimization policies dynamically control
  * distributed execution behaviors, and at the same time enjoy correctness/reusability/composability properties that
  * the IRDAG abstraction provides.
diff --git a/compiler/backend/src/test/java/org/apache/nemo/compiler/backend/nemo/NemoBackendTest.java b/compiler/backend/src/test/java/org/apache/nemo/compiler/backend/nemo/NemoBackendTest.java
index ae851fa..12797d2 100644
--- a/compiler/backend/src/test/java/org/apache/nemo/compiler/backend/nemo/NemoBackendTest.java
+++ b/compiler/backend/src/test/java/org/apache/nemo/compiler/backend/nemo/NemoBackendTest.java
@@ -18,12 +18,12 @@
  */
 package org.apache.nemo.compiler.backend.nemo;
 
+import org.apache.nemo.common.dag.DAGBuilder;
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.vertex.OperatorVertex;
-import org.apache.nemo.common.dag.DAGBuilder;
 import org.apache.nemo.common.test.EmptyComponents;
 import org.apache.nemo.compiler.optimizer.policy.TransientResourcePolicy;
 import org.apache.nemo.conf.JobConf;
@@ -53,11 +53,11 @@
   @Before
   public void setUp() throws Exception {
     this.dag = new IRDAG(builder.addVertex(source).addVertex(map1).addVertex(groupByKey).addVertex(combine).addVertex(map2)
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, source, map1))
-        .connectVertices(EmptyComponents.newDummyShuffleEdge(map1, groupByKey))
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, groupByKey, combine))
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, combine, map2))
-        .build());
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, source, map1))
+      .connectVertices(EmptyComponents.newDummyShuffleEdge(map1, groupByKey))
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, groupByKey, combine))
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, combine, map2))
+      .build());
 
     this.dag = new TransientResourcePolicy().runCompileTimeOptimization(dag, EMPTY_DAG_DIRECTORY);
 
diff --git a/compiler/frontend/beam/pom.xml b/compiler/frontend/beam/pom.xml
index c999fff..0c19975 100644
--- a/compiler/frontend/beam/pom.xml
+++ b/compiler/frontend/beam/pom.xml
@@ -15,7 +15,8 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/BeamKeyExtractor.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/BeamKeyExtractor.java
index 9ea0591..61818c5 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/BeamKeyExtractor.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/BeamKeyExtractor.java
@@ -19,9 +19,9 @@
 package org.apache.nemo.compiler.frontend.beam;
 
 import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.nemo.common.KeyExtractor;
-import org.apache.beam.sdk.values.KV;
 
 /**
  * Extracts the key from a KV element.
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/InMemorySideInputReader.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/InMemorySideInputReader.java
index f1bb460..c8fe064 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/InMemorySideInputReader.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/InMemorySideInputReader.java
@@ -29,7 +29,10 @@
 import org.slf4j.LoggerFactory;
 
 import javax.annotation.Nullable;
-import java.util.*;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
 
 /**
  * Accumulates and provides side inputs in memory.
@@ -80,7 +83,8 @@
 
   /**
    * Stores the side input in memory to be used with main inputs.
-   * @param view of the side input.
+   *
+   * @param view             of the side input.
    * @param sideInputElement to add.
    */
   public void addSideInputElement(final PCollectionView<?> view,
@@ -94,6 +98,7 @@
    * Say a DoFn of this reader has 3 main inputs and 4 side inputs.
    * Nemo runtime guarantees that the watermark here
   * is the minimum of all 7 input streams.
+   *
    * @param newWatermark to set.
    */
   public void setCurrentWatermarkOfAllMainAndSideInputs(final long newWatermark) {
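
The invariant above (the reader sees the minimum watermark across all main and side input streams) can be sketched as follows; the per-input watermark collection is a placeholder, and only `setCurrentWatermarkOfAllMainAndSideInputs` from this diff is assumed.

```java
import java.util.Collection;
import org.apache.nemo.compiler.frontend.beam.InMemorySideInputReader;

final class WatermarkSketch {
  // Hedged sketch: drive the reader with the minimum watermark over all
  // (main and side) input streams, as the runtime guarantees above.
  static void advance(final InMemorySideInputReader reader,
                      final Collection<Long> perInputWatermarks) {
    long min = Long.MAX_VALUE;
    for (final long watermark : perInputWatermarks) {
      min = Math.min(min, watermark);
    }
    reader.setCurrentWatermarkOfAllMainAndSideInputs(min);
  }
}
```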
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/PipelineTranslationContext.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/PipelineTranslationContext.java
index aec816a..6a4c046 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/PipelineTranslationContext.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/PipelineTranslationContext.java
@@ -39,7 +39,9 @@
 import org.apache.nemo.compiler.frontend.beam.coder.SideInputCoder;
 import org.apache.nemo.compiler.frontend.beam.transform.*;
 
-import java.util.*;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Stack;
 
 /**
  * A collection of translators for the Beam PTransforms.
@@ -55,7 +57,7 @@
   private final Pipeline pipeline;
 
   /**
-   * @param pipeline the pipeline to translate
+   * @param pipeline        the pipeline to translate
    * @param pipelineOptions {@link PipelineOptions}
    */
   PipelineTranslationContext(final Pipeline pipeline,
@@ -101,13 +103,13 @@
 
   /**
    * Say the dstIRVertex consumes three views: view0, view1, and view2.
-   *
+   * <p>
    * We translate that as the following:
    * view0 -> SideInputTransform(index=0) ->
    * view1 -> SideInputTransform(index=1) -> dstIRVertex(with a map from indices to PCollectionViews)
    * view2 -> SideInputTransform(index=2) ->
    *
-   * @param dstVertex vertex.
+   * @param dstVertex  vertex.
    * @param sideInputs of the vertex.
    */
   void addSideInputEdges(final IRVertex dstVertex, final Map<Integer, PCollectionView<?>> sideInputs) {
@@ -145,7 +147,7 @@
   /**
    * Add IR edge to the builder.
    *
-   * @param dst the destination IR vertex.
+   * @param dst   the destination IR vertex.
    * @param input the {@link PValue} {@code dst} consumes
    */
   void addEdgeTo(final IRVertex dst, final PValue input) {
@@ -171,9 +173,9 @@
   }
 
   /**
-   * @param edge IR edge to add.
+   * @param edge         IR edge to add.
    * @param elementCoder element coder.
-   * @param windowCoder window coder.
+   * @param windowCoder  window coder.
    */
   void addEdge(final IREdge edge, final Coder elementCoder, final Coder windowCoder) {
     edge.setProperty(KeyExtractorProperty.of(new BeamKeyExtractor()));
@@ -192,9 +194,10 @@
 
   /**
    * Registers a {@link PValue} as a main output from the specified {@link IRVertex}.
-   * @param node node
+   *
+   * @param node     node
    * @param irVertex the IR vertex
-   * @param output the {@link PValue} {@code irVertex} emits as main output
+   * @param output   the {@link PValue} {@code irVertex} emits as main output
    */
   void registerMainOutputFrom(final TransformHierarchy.Node node,
                               final IRVertex irVertex,
@@ -206,10 +209,10 @@
   /**
    * Registers a {@link PValue} as an additional output from the specified {@link IRVertex}.
    *
-   * @param node node
+   * @param node     node
    * @param irVertex the IR vertex
-   * @param output the {@link PValue} {@code irVertex} emits as additional output
-   * @param tag the {@link TupleTag} associated with this additional output
+   * @param output   the {@link PValue} {@code irVertex} emits as additional output
+   * @param tag      the {@link TupleTag} associated with this additional output
    */
   void registerAdditionalOutputFrom(final TransformHierarchy.Node node,
                                     final IRVertex irVertex,
@@ -284,7 +287,8 @@
 
   /**
    * Get appropriate coder for {@link PCollectionView}.
-   * @param view {@link PCollectionView}
+   *
+   * @param view    {@link PCollectionView}
    * @param context translation context.
    * @return appropriate {@link Coder} for {@link PCollectionView}
    */
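
The index-based side-input wiring described above amounts to numbering the consumed views and handing the map to `addSideInputEdges` (shown in this diff). A hedged sketch follows; the list of consumed views is a placeholder, and the sketch assumes it lives in the same package since the context's methods are package-private.

```java
package org.apache.nemo.compiler.frontend.beam;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.nemo.common.ir.vertex.IRVertex;

final class SideInputWiringSketch {
  // Hedged sketch: assign each consumed view an index (0, 1, 2, ...) so the
  // destination vertex can map incoming side inputs back to their views.
  static void wire(final PipelineTranslationContext context,
                   final IRVertex dstVertex,
                   final List<PCollectionView<?>> viewsConsumedByDst) {
    final Map<Integer, PCollectionView<?>> sideInputs = new HashMap<>();
    for (int i = 0; i < viewsConsumedByDst.size(); i++) {
      sideInputs.put(i, viewsConsumedByDst.get(i));
    }
    context.addSideInputEdges(dstVertex, sideInputs);
  }
}
```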
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/PipelineTranslator.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/PipelineTranslator.java
index 816c539..1f91e57 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/PipelineTranslator.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/PipelineTranslator.java
@@ -23,11 +23,22 @@
 import org.apache.beam.runners.core.construction.ParDoTranslation;
 import org.apache.beam.runners.core.construction.TransformInputs;
 import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.coders.CannotProvideCoderException;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.coders.KvCoder;
+import org.apache.beam.sdk.io.Read;
 import org.apache.beam.sdk.runners.AppliedPTransform;
 import org.apache.beam.sdk.runners.TransformHierarchy;
+import org.apache.beam.sdk.transforms.*;
 import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.transforms.display.HasDisplayData;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindows;
+import org.apache.beam.sdk.transforms.windowing.Window;
+import org.apache.beam.sdk.transforms.windowing.WindowFn;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PCollectionView;
+import org.apache.beam.sdk.values.TupleTag;
+import org.apache.beam.sdk.values.TupleTagList;
 import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
 import org.apache.nemo.common.ir.vertex.IRVertex;
@@ -36,20 +47,19 @@
 import org.apache.nemo.compiler.frontend.beam.source.BeamBoundedSourceVertex;
 import org.apache.nemo.compiler.frontend.beam.source.BeamUnboundedSourceVertex;
 import org.apache.nemo.compiler.frontend.beam.transform.*;
-import org.apache.beam.sdk.coders.*;
-import org.apache.beam.sdk.io.Read;
-import org.apache.beam.sdk.transforms.*;
-import org.apache.beam.sdk.transforms.windowing.Window;
-import org.apache.beam.sdk.transforms.windowing.WindowFn;
-import org.apache.beam.sdk.values.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
-import java.lang.annotation.*;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
-import java.util.*;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
@@ -93,7 +103,7 @@
   }
 
   /**
-   * @param context provides translation context.
+   * @param context   provides translation context.
    * @param primitive primitive node.
    */
   void translatePrimitive(final PipelineTranslationContext context,
@@ -118,7 +128,7 @@
   }
 
   /**
-   * @param context context.
+   * @param context   context.
    * @param composite transform.
    * @return behavior controls whether or not child transforms are visited.
    */
@@ -175,8 +185,8 @@
   /////////////////////// PRIMITIVE TRANSFORMS
 
   /**
-   * @param ctx provides translation context
-   * @param beamNode the beam node to be translated
+   * @param ctx       provides translation context
+   * @param beamNode  the beam node to be translated
    * @param transform transform which can be obtained from {@code beamNode}
    */
   @PrimitiveTransformTranslator(Read.Unbounded.class)
@@ -190,8 +200,8 @@
   }
 
   /**
-   * @param ctx provides translation context
-   * @param beamNode the beam node to be translated
+   * @param ctx       provides translation context
+   * @param beamNode  the beam node to be translated
    * @param transform transform which can be obtained from {@code beamNode}
    */
   @PrimitiveTransformTranslator(Read.Bounded.class)
@@ -205,8 +215,8 @@
   }
 
   /**
-   * @param ctx provides translation context
-   * @param beamNode the beam node to be translated
+   * @param ctx       provides translation context
+   * @param beamNode  the beam node to be translated
    * @param transform transform which can be obtained from {@code beamNode}
    */
   @PrimitiveTransformTranslator(ParDo.SingleOutput.class)
@@ -226,8 +236,8 @@
   }
 
   /**
-   * @param ctx provides translation context
-   * @param beamNode the beam node to be translated
+   * @param ctx       provides translation context
+   * @param beamNode  the beam node to be translated
    * @param transform transform which can be obtained from {@code beamNode}
    */
   @PrimitiveTransformTranslator(ParDo.MultiOutput.class)
@@ -252,8 +262,8 @@
   }
 
   /**
-   * @param ctx provides translation context
-   * @param beamNode the beam node to be translated
+   * @param ctx       provides translation context
+   * @param beamNode  the beam node to be translated
    * @param transform transform which can be obtained from {@code beamNode}
    */
   @PrimitiveTransformTranslator(GroupByKey.class)
@@ -267,8 +277,8 @@
   }
 
   /**
-   * @param ctx provides translation context
-   * @param beamNode the beam node to be translated
+   * @param ctx       provides translation context
+   * @param beamNode  the beam node to be translated
    * @param transform transform which can be obtained from {@code beamNode}
    */
   @PrimitiveTransformTranslator({Window.class, Window.Assign.class})
@@ -291,8 +301,8 @@
   }
 
   /**
-   * @param ctx provides translation context
-   * @param beamNode the beam node to be translated
+   * @param ctx       provides translation context
+   * @param beamNode  the beam node to be translated
    * @param transform transform which can be obtained from {@code beamNode}
    */
   @PrimitiveTransformTranslator(View.CreatePCollectionView.class)
@@ -307,8 +317,8 @@
   }
 
   /**
-   * @param ctx provides translation context
-   * @param beamNode the beam node to be translated
+   * @param ctx       provides translation context
+   * @param beamNode  the beam node to be translated
    * @param transform transform which can be obtained from {@code beamNode}
    */
   @PrimitiveTransformTranslator(Flatten.PCollections.class)
@@ -329,8 +339,8 @@
    * ({@link Combine.Globally} internally uses {@link Combine.PerKey} which will also be optimized by this translator)
    * Here, we translate this composite transform as a whole, exploiting its accumulator semantics.
    *
-   * @param ctx provides translation context
-   * @param beamNode the beam node to be translated
+   * @param ctx       provides translation context
+   * @param beamNode  the beam node to be translated
    * @param transform transform which can be obtained from {@code beamNode}
    * @return behavior controls whether or not child transforms are visited.
    */
@@ -388,8 +398,8 @@
   }
 
   /**
-   * @param ctx provides translation context
-   * @param beamNode the beam node to be translated
+   * @param ctx       provides translation context
+   * @param beamNode  the beam node to be translated
    * @param transform transform which can be obtained from {@code beamNode}
    * @return behavior controls whether or not child transforms are visited.
    */
@@ -415,8 +425,8 @@
   }
 
   /**
-   * @param ctx provides translation context.
-   * @param beamNode the beam node to be translated.
+   * @param ctx          provides translation context.
+   * @param beamNode     the beam node to be translated.
    * @param sideInputMap side inputs.
    * @return the created DoFnTransform.
    */
@@ -480,7 +490,8 @@
   /**
    * Create a group by key transform.
    * It returns GroupByKeyAndWindowDoFnTransform if window function is not default.
-   * @param ctx translation context
+   *
+   * @param ctx      translation context
    * @param beamNode the beam node to be translated
    * @return group by key transform
    */
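
Every hunk in the translator above makes the same change: IntelliJ's reformat aligns the `@param` descriptions into a single column by padding the parameter names. A dependency-free sketch of the resulting style (the class and method below are hypothetical, not Nemo code):

```java
/**
 * Illustrative only: shows the aligned-@param javadoc style applied throughout this diff.
 */
public final class JavadocAlignmentExample {
  /**
   * @param ctx       provides translation context
   * @param beamNode  the beam node to be translated
   * @param transform transform which can be obtained from {@code beamNode}
   */
  public static void translate(final String ctx, final String beamNode, final String transform) {
    // No-op: only the javadoc layout matters for this sketch.
  }

  private JavadocAlignmentExample() {
  }
}
```

The alignment keeps multi-parameter javadoc scannable, at the cost of re-padding the whole block whenever a longer parameter name is introduced.
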
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/PipelineVisitor.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/PipelineVisitor.java
index fa3d00d..4cc6574 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/PipelineVisitor.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/PipelineVisitor.java
@@ -32,7 +32,7 @@
   private final PipelineTranslationContext context;
 
   /**
-   * @param pipeline to visit.
+   * @param pipeline        to visit.
    * @param pipelineOptions pipeline options.
    */
   public PipelineVisitor(final Pipeline pipeline, final NemoPipelineOptions pipelineOptions) {
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/SideInputElement.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/SideInputElement.java
index 7ec91cc..75d10fa 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/SideInputElement.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/SideInputElement.java
@@ -21,6 +21,7 @@
 /**
  * {@link org.apache.nemo.compiler.frontend.beam.transform.DoFnTransform} treats elements of this type as side inputs.
  * TODO #289: Prevent using SideInputElement in UDFs
+ *
  * @param <T> type of the side input value.
  */
 public final class SideInputElement<T> {
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/coder/BeamDecoderFactory.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/coder/BeamDecoderFactory.java
index 12a20d3..9302bf7 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/coder/BeamDecoderFactory.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/coder/BeamDecoderFactory.java
@@ -18,10 +18,10 @@
  */
 package org.apache.nemo.compiler.frontend.beam.coder;
 
-import org.apache.nemo.common.coder.DecoderFactory;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.CoderException;
 import org.apache.beam.sdk.coders.VoidCoder;
+import org.apache.nemo.common.coder.DecoderFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -30,6 +30,7 @@
 
 /**
  * {@link DecoderFactory} from {@link org.apache.beam.sdk.coders.Coder}.
+ *
  * @param <T> the type of element to decode.
  */
 public final class BeamDecoderFactory<T> implements DecoderFactory<T> {
@@ -62,6 +63,7 @@
 
   /**
    * Abstract class for Beam Decoder.
+   *
    * @param <T2> the type of element to decode.
    */
   private abstract class BeamAbstractDecoder<T2> implements Decoder<T2> {
@@ -105,6 +107,7 @@
 
   /**
    * Beam Decoder for non void objects.
+   *
    * @param <T2> the type of element to decode.
    */
   private final class BeamDecoder<T2> extends BeamAbstractDecoder<T2> {
@@ -133,6 +136,7 @@
 
   /**
    * Beam Decoder for {@link VoidCoder}.
+   *
    * @param <T2> the type of element to decode.
    */
   private final class BeamVoidDecoder<T2> extends BeamAbstractDecoder<T2> {
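
This file also shows the import re-organization noted in the PR description: imports are sorted into one alphabetical sequence, so `org.apache.beam.*` now precedes `org.apache.nemo.*` instead of project imports coming first. A minimal, compilable sketch of the same rule using only JDK classes (the strings are illustrative):

```java
// Dependency-free sketch: imports form a single alphabetical run, which is the
// rule the hunks above apply to org.apache.beam.* and org.apache.nemo.* imports.
import java.util.Arrays;
import java.util.List;

public final class ImportOrderExample {
  public static void main(final String[] args) {
    final List<String> imports = Arrays.asList(
      "org.apache.beam.sdk.coders.Coder",
      "org.apache.nemo.common.coder.DecoderFactory");
    // Lexicographic order explains why the nemo import moved below the beam ones.
    System.out.println("already sorted: " + (imports.get(0).compareTo(imports.get(1)) < 0));
  }
}
```
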
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/coder/BeamEncoderFactory.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/coder/BeamEncoderFactory.java
index a000ead..a60cae8 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/coder/BeamEncoderFactory.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/coder/BeamEncoderFactory.java
@@ -18,10 +18,10 @@
  */
 package org.apache.nemo.compiler.frontend.beam.coder;
 
-import org.apache.nemo.common.coder.EncoderFactory;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.CoderException;
 import org.apache.beam.sdk.coders.VoidCoder;
+import org.apache.nemo.common.coder.EncoderFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -30,6 +30,7 @@
 
 /**
  * {@link EncoderFactory} from {@link Coder}.
+ *
  * @param <T> the type of element to encode.
  */
 public final class BeamEncoderFactory<T> implements EncoderFactory<T> {
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/coder/SideInputCoder.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/coder/SideInputCoder.java
index 8cefa4b..9660b36 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/coder/SideInputCoder.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/coder/SideInputCoder.java
@@ -28,6 +28,7 @@
 
 /**
  * EncoderFactory for side inputs.
+ *
  * @param <T> type of the side input value.
  */
 public final class SideInputCoder<T> extends StructuredCoder<SideInputElement<T>> {
@@ -35,6 +36,7 @@
 
   /**
    * Private constructor.
+   *
    * @param valueCoder the coder for side input.
    */
   private SideInputCoder(final Coder<T> valueCoder) {
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/source/BeamBoundedSourceVertex.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/source/BeamBoundedSourceVertex.java
index 07f3a0e..826e00c 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/source/BeamBoundedSourceVertex.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/source/BeamBoundedSourceVertex.java
@@ -39,6 +39,7 @@
 
 /**
  * SourceVertex implementation for BoundedSource.
+ *
  * @param <O> output type.
  */
 public final class BeamBoundedSourceVertex<O> extends SourceVertex<WindowedValue<O>> {
@@ -50,7 +51,7 @@
   /**
    * Constructor of BeamBoundedSourceVertex.
    *
-   * @param source BoundedSource to read from.
+   * @param source      BoundedSource to read from.
    * @param displayData data to display.
    */
   public BeamBoundedSourceVertex(final BoundedSource<O> source, final DisplayData displayData) {
@@ -125,6 +126,7 @@
 
   /**
    * BoundedSourceReadable class.
+   *
    * @param <T> type.
    */
   private static final class BoundedSourceReadable<T> implements Readable<WindowedValue<T>> {
@@ -134,6 +136,7 @@
 
     /**
      * Constructor of the BoundedSourceReadable.
+     *
      * @param boundedSource the BoundedSource.
      */
     BoundedSourceReadable(final BoundedSource<T> boundedSource) {
@@ -184,7 +187,7 @@
         final Field inputSplitField = boundedSource.getClass().getDeclaredField("inputSplit");
         inputSplitField.setAccessible(true);
         final InputSplit inputSplit = ((HadoopFormatIO.SerializableSplit) inputSplitField
-            .get(boundedSource)).getSplit();
+          .get(boundedSource)).getSplit();
         return Arrays.asList(inputSplit.getLocations());
       } else {
         throw new UnsupportedOperationException();
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/source/BeamUnboundedSourceVertex.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/source/BeamUnboundedSourceVertex.java
index 7371842..44706bd 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/source/BeamUnboundedSourceVertex.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/source/BeamUnboundedSourceVertex.java
@@ -38,6 +38,7 @@
 
 /**
  * SourceVertex implementation for UnboundedSource.
+ *
  * @param <O> output type.
  * @param <M> checkpoint mark type.
  */
@@ -50,7 +51,8 @@
 
   /**
    * The default constructor for beam unbounded source.
-   * @param source unbounded source.
+   *
+   * @param source      unbounded source.
    * @param displayData static display data associated with a pipeline component.
    */
   public BeamUnboundedSourceVertex(final UnboundedSource<O, M> source,
@@ -61,6 +63,7 @@
 
   /**
    * Copy constructor.
+   *
    * @param that the original vertex.
    */
   private BeamUnboundedSourceVertex(final BeamUnboundedSourceVertex<O, M> that) {
@@ -113,11 +116,12 @@
 
   /**
    * UnboundedSourceReadable class.
+   *
    * @param <O> output type.
    * @param <M> checkpoint mark type.
    */
   private static final class UnboundedSourceReadable<O, M extends UnboundedSource.CheckpointMark>
-      implements Readable<Object> {
+    implements Readable<Object> {
     private final UnboundedSource<O, M> unboundedSource;
     private UnboundedSource.UnboundedReader<O> reader;
     private boolean isStarted = false;
@@ -126,6 +130,7 @@
 
     /**
      * Constructor.
+     *
      * @param unboundedSource unbounded source.
      */
     UnboundedSourceReadable(final UnboundedSource<O, M> unboundedSource) {
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/AbstractDoFnTransform.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/AbstractDoFnTransform.java
index a8a44a8..1139282 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/AbstractDoFnTransform.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/AbstractDoFnTransform.java
@@ -44,8 +44,8 @@
 /**
  * This is a base class for Beam DoFn Transforms.
  *
- * @param <InputT> input type.
- * @param <InterT> intermediate type.
+ * @param <InputT>  input type.
+ * @param <InterT>  intermediate type.
  * @param <OutputT> output type.
  */
 public abstract class AbstractDoFnTransform<InputT, InterT, OutputT> implements
@@ -84,15 +84,16 @@
 
   /**
    * AbstractDoFnTransform constructor.
-   * @param doFn doFn
-   * @param inputCoder input coder
-   * @param outputCoders output coders
-   * @param mainOutputTag main output tag
+   *
+   * @param doFn                 doFn
+   * @param inputCoder           input coder
+   * @param outputCoders         output coders
+   * @param mainOutputTag        main output tag
    * @param additionalOutputTags additional output tags
-   * @param windowingStrategy windowing strategy
-   * @param sideInputs side inputs
-   * @param options pipeline options
-   * @param displayData display data.
+   * @param windowingStrategy    windowing strategy
+   * @param sideInputs           side inputs
+   * @param options              pipeline options
+   * @param displayData          display data.
    */
   public AbstractDoFnTransform(final DoFn<InterT, OutputT> doFn,
                                final Coder<InputT> inputCoder,
@@ -116,6 +117,7 @@
 
   /**
    * Getter function for side inputs.
+   *
    * @return the side inputs
    */
   final Map<Integer, PCollectionView<?>> getSideInputs() {
@@ -124,6 +126,7 @@
 
   /**
    * Getter function for output manager.
+   *
    * @return the output manager.
    */
   final DoFnRunners.OutputManager getOutputManager() {
@@ -132,6 +135,7 @@
 
   /**
    * Getter function for windowing strategy.
+   *
    * @return the windowing strategy.
    */
   final WindowingStrategy getWindowingStrategy() {
@@ -140,6 +144,7 @@
 
   /**
    * Getter function for output tag.
+   *
    * @return main output tag.
    */
   final TupleTag<OutputT> getMainOutputTag() {
@@ -148,6 +153,7 @@
 
   /**
    * Getter function for DoFn runner.
+   *
    * @return DoFn runner.
    */
   final DoFnRunner<InterT, OutputT> getDoFnRunner() {
@@ -156,6 +162,7 @@
 
   /**
    * Getter function for push back runner.
+   *
    * @return push back runner.
    */
   final PushbackSideInputDoFnRunner<InterT, OutputT> getPushBackRunner() {
@@ -164,6 +171,7 @@
 
   /**
    * Getter function for side input reader.
+   *
    * @return side input reader.
    */
   final InMemorySideInputReader getSideInputReader() {
@@ -172,6 +180,7 @@
 
   /**
    * Getter function for DoFn.
+   *
    * @return DoFn.
    */
   public final DoFn getDoFn() {
@@ -181,7 +190,7 @@
   /**
    * Checks whether the bundle is finished or not.
    * Starts the bundle if it is done.
-   *
+   * <p>
    * TODO #263: Partial Combining for Beam Streaming
    * We may want to use separate methods for doFnRunner/pushBackRunner
    * (same applies to the other bundle-related methods)
@@ -251,6 +260,7 @@
       public StateInternals stateInternals() {
         throw new UnsupportedOperationException("Not support stateInternals in DoFnTransform");
       }
+
       @Override
       public TimerInternals timerInternals() {
         throw new UnsupportedOperationException("Not support timerInternals in DoFnTransform");
@@ -284,6 +294,7 @@
 
   /**
    * Getter function for output collector.
+   *
    * @return output collector.
    */
   public final OutputCollector<WindowedValue<OutputT>> getOutputCollector() {
@@ -304,6 +315,7 @@
 
   /**
    * An abstract function that wraps the original doFn.
+   *
    * @param originalDoFn the original doFn.
    * @return wrapped doFn.
    */
@@ -311,6 +323,7 @@
 
   /**
    * An abstract function that wraps the original output collector.
+   *
    * @param oc the original outputCollector.
    * @return wrapped output collector.
    */
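
A smaller recurring edit in this file replaces a bare `*` line in multi-paragraph javadoc with `* <p>`. In rendered javadoc a blank comment line does not start a new paragraph; the `<p>` tag does. A sketch re-using the two sentences from the hunk above purely for illustration:

```java
/**
 * Checks whether the bundle is finished or not.
 * Starts the bundle if it is done.
 * <p>
 * Without the tag above, both paragraphs would render as one block in the
 * generated HTML; {@code <p>} is what actually starts the second paragraph.
 */
public final class JavadocParagraphExample {
  private JavadocParagraphExample() {
  }
}
```
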
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/CombineFnFinalTransform.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/CombineFnFinalTransform.java
index 9ef17a2..8f7d2e2 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/CombineFnFinalTransform.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/CombineFnFinalTransform.java
@@ -38,6 +38,7 @@
  * (Currently supports batch-style global windows only)
  * TODO #263: Partial Combining for Beam Streaming
  * TODO #264: Partial Combining with Beam SideInputs
+ *
  * @param <K> Key type.
  * @param <A> Accum type.
  * @param <O> Output type.
@@ -53,6 +54,7 @@
 
   /**
    * Constructor.
+   *
    * @param combineFn combine function.
    */
   public CombineFnFinalTransform(final CombineFnBase.GlobalCombineFn<?, A, O> combineFn) {
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/CombineFnPartialTransform.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/CombineFnPartialTransform.java
index 5f46f90..c620eb7 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/CombineFnPartialTransform.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/CombineFnPartialTransform.java
@@ -37,6 +37,7 @@
  * (Currently supports batch-style global windows only)
  * TODO #263: Partial Combining for Beam Streaming
  * TODO #264: Partial Combining with Beam SideInputs
+ *
  * @param <K> Key type.
  * @param <I> Input type.
  * @param <A> Accum type.
@@ -52,6 +53,7 @@
 
   /**
    * Constructor.
+   *
    * @param combineFn combine function.
    */
   public CombineFnPartialTransform(final CombineFnBase.GlobalCombineFn<I, A, ?> combineFn) {
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/CreateViewTransform.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/CreateViewTransform.java
index 8444f0b..bf0e274 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/CreateViewTransform.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/CreateViewTransform.java
@@ -18,13 +18,13 @@
  */
 package org.apache.nemo.compiler.frontend.beam.transform;
 
+import org.apache.beam.sdk.transforms.Materializations;
+import org.apache.beam.sdk.transforms.ViewFn;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.nemo.common.ir.OutputCollector;
-import org.apache.beam.sdk.transforms.Materializations;
-import org.apache.beam.sdk.transforms.ViewFn;
 import org.apache.beam.sdk.values.KV;
+import org.apache.nemo.common.ir.OutputCollector;
 import org.apache.nemo.common.ir.vertex.transform.Transform;
 import org.apache.nemo.common.punctuation.Watermark;
 
@@ -34,6 +34,7 @@
 
 /**
  * This transforms emits materialized data for each window.
+ *
  * @param <I> input type
  * @param <O> materialized output type
  */
@@ -47,9 +48,10 @@
 
   /**
    * Constructor of CreateViewTransform.
+   *
    * @param viewFn the viewFn that materializes data.
    */
-  public CreateViewTransform(final ViewFn<Materializations.MultimapView<Void, ?>, O> viewFn)  {
+  public CreateViewTransform(final ViewFn<Materializations.MultimapView<Void, ?>, O> viewFn) {
     this.viewFn = viewFn;
     this.windowListMap = new HashMap<>();
     this.currentOutputWatermark = Long.MIN_VALUE;
@@ -119,6 +121,7 @@
 
   /**
    * Represents {@code PrimitiveViewT} supplied to the {@link ViewFn}.
+   *
    * @param <T> primitive view type
    */
   public static final class MultiView<T> implements Materializations.MultimapView<Void, T>, Serializable {
@@ -126,6 +129,7 @@
 
     /**
      * Constructor.
+     *
      * @param iterable placeholder for side input data.
      */
     public MultiView(final Iterable<T> iterable) {
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/DefaultOutputManager.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/DefaultOutputManager.java
index 16b4e54..bcb5f88 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/DefaultOutputManager.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/DefaultOutputManager.java
@@ -25,6 +25,7 @@
 
 /**
  * Default output emitter that uses outputCollector.
+ *
  * @param <OutputT> output type
  */
 public final class DefaultOutputManager<OutputT> implements DoFnRunners.OutputManager {
@@ -33,7 +34,7 @@
 
   /**
    * @param outputCollector output collector.
-   * @param mainOutputTag main output tag.
+   * @param mainOutputTag   main output tag.
    */
   DefaultOutputManager(final OutputCollector<WindowedValue<OutputT>> outputCollector,
                        final TupleTag<OutputT> mainOutputTag) {
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/DoFnTransform.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/DoFnTransform.java
index 1afc69b..2748b02 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/DoFnTransform.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/DoFnTransform.java
@@ -37,7 +37,7 @@
 /**
  * DoFn transform implementation when there is no side input.
  *
- * @param <InputT> input type.
+ * @param <InputT>  input type.
  * @param <OutputT> output type.
  */
 public final class DoFnTransform<InputT, OutputT> extends AbstractDoFnTransform<InputT, InputT, OutputT> {
@@ -45,14 +45,15 @@
 
   /**
    * DoFnTransform Constructor.
-   * @param doFn doFn
-   * @param inputCoder input coder
-   * @param outputCoders output coders
-   * @param mainOutputTag main output tag
+   *
+   * @param doFn                 doFn
+   * @param inputCoder           input coder
+   * @param outputCoders         output coders
+   * @param mainOutputTag        main output tag
    * @param additionalOutputTags additional output tags
-   * @param windowingStrategy windowing strategy
-   * @param options pipeline options
-   * @param displayData display data.
+   * @param windowingStrategy    windowing strategy
+   * @param options              pipeline options
+   * @param displayData          display data.
    */
   public DoFnTransform(final DoFn<InputT, OutputT> doFn,
                        final Coder<InputT> inputCoder,
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/FlattenTransform.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/FlattenTransform.java
index 082e5d7..af1146f 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/FlattenTransform.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/FlattenTransform.java
@@ -24,6 +24,7 @@
 
 /**
  * Flatten transform implementation.
+ *
  * @param <T> input/output type.
  */
 public final class FlattenTransform<T> implements Transform<T, T> {
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/GroupByKeyAndWindowDoFnTransform.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/GroupByKeyAndWindowDoFnTransform.java
index f3a2430..cf75cb4 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/GroupByKeyAndWindowDoFnTransform.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/GroupByKeyAndWindowDoFnTransform.java
@@ -26,9 +26,9 @@
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.sdk.values.KV;
 import org.apache.nemo.common.ir.OutputCollector;
 import org.apache.nemo.common.punctuation.Watermark;
 import org.joda.time.Instant;
@@ -39,7 +39,8 @@
 
 /**
  * Groups elements according to key and window.
- * @param <K> key type.
+ *
+ * @param <K>      key type.
  * @param <InputT> input type.
  */
 public final class GroupByKeyAndWindowDoFnTransform<K, InputT>
@@ -55,12 +56,13 @@
 
   /**
    * GroupByKey constructor.
-   * @param outputCoders output coders
-   * @param mainOutputTag main output tag
+   *
+   * @param outputCoders      output coders
+   * @param mainOutputTag     main output tag
    * @param windowingStrategy windowing strategy
-   * @param options pipeline options
-   * @param reduceFn reduce function
-   * @param displayData display data.
+   * @param options           pipeline options
+   * @param reduceFn          reduce function
+   * @param displayData       display data.
    */
   public GroupByKeyAndWindowDoFnTransform(final Map<TupleTag<?>, Coder<?>> outputCoders,
                                           final TupleTag<KV<K, Iterable<InputT>>> mainOutputTag,
@@ -85,6 +87,7 @@
 
   /**
    * This creates a new DoFn that groups elements by key and window.
+   *
    * @param doFn original doFn.
    * @return GroupAlsoByWindowViaWindowSetNewDoFn
    */
@@ -114,6 +117,7 @@
   /**
    * It collects data for each key.
    * The collected data are emitted at {@link GroupByKeyAndWindowDoFnTransform#onWatermark(Watermark)}
+   *
    * @param element data element
    */
   @Override
@@ -134,8 +138,9 @@
 
   /**
    * Process the collected data and trigger timers.
-   * @param inputWatermark current input watermark
-   * @param processingTime processing time
+   *
+   * @param inputWatermark   current input watermark
+   * @param processingTime   processing time
    * @param synchronizedTime synchronized time
    */
   private void processElementsAndTriggerTimers(final Watermark inputWatermark,
@@ -166,7 +171,8 @@
   /**
    * Output watermark
    * = max(prev output watermark,
-   *          min(input watermark, watermark holds)).
+   * min(input watermark, watermark holds)).
+   *
    * @param inputWatermark input watermark
    */
   private void emitOutputWatermark(final Watermark inputWatermark) {
@@ -219,9 +225,10 @@
   /**
    * Trigger times for current key.
    * When triggering, it emits the windowed data to downstream operators.
-   * @param key key
-   * @param watermark watermark
-   * @param processingTime processing time
+   *
+   * @param key              key
+   * @param watermark        watermark
+   * @param processingTime   processing time
    * @param synchronizedTime synchronized time
    */
   private void triggerTimers(final K key,
@@ -252,6 +259,7 @@
 
   /**
    * Get timer data.
+   *
    * @param timerInternals in-memory timer internals.
    * @return list of timer datas.
    */
@@ -382,6 +390,7 @@
     public void emitWatermark(final Watermark watermark) {
       outputCollector.emitWatermark(watermark);
     }
+
     @Override
     public <T> void emit(final String dstVertexId, final T output) {
       outputCollector.emit(dstVertexId, output);
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/GroupByKeyTransform.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/GroupByKeyTransform.java
index 71c68ea..ab3f258 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/GroupByKeyTransform.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/GroupByKeyTransform.java
@@ -19,9 +19,9 @@
 package org.apache.nemo.compiler.frontend.beam.transform;
 
 import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.sdk.values.KV;
 import org.apache.nemo.common.ir.OutputCollector;
 import org.apache.nemo.common.ir.vertex.transform.NoWatermarkEmitTransform;
-import org.apache.beam.sdk.values.KV;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -29,6 +29,7 @@
 
 /**
  * Group Beam KVs.
+ *
  * @param <I> input type.
  */
 public final class GroupByKeyTransform<I> extends NoWatermarkEmitTransform<I, WindowedValue<KV<Object, List>>> {
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/LoopCompositeTransform.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/LoopCompositeTransform.java
index 2c9c3b1..2f2129e 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/LoopCompositeTransform.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/LoopCompositeTransform.java
@@ -28,9 +28,10 @@
  * A composite transform for wrapping transforms inside a loop to create loop operators in the IR.
  * Each iterations are stateless, as the repeating iterations are zipped into a single copy.
  * We assume a single {@link LoopCompositeTransform} inside a for/while loop.
- * @param <inputT> input type of the composite transform.
+ *
+ * @param <inputT>  input type of the composite transform.
  * @param <outputT> output type of the composite transform.
  */
 public abstract class LoopCompositeTransform<inputT extends PInput, outputT extends POutput>
-    extends PTransform<inputT, outputT> implements Serializable {
+  extends PTransform<inputT, outputT> implements Serializable {
 }
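
The hunk above also switches wrapped `extends`/`implements` clauses from the old four-space continuation to a two-space continuation indent, matching the change to `UnboundedSourceReadable` earlier in this diff. A compilable sketch of the wrapped form (the class is hypothetical):

```java
// Illustrative only: a wrapped implements clause with two-space continuation indent.
public abstract class ContinuationIndentExample<T>
  implements java.io.Serializable, Comparable<ContinuationIndentExample<T>> {
}
```
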
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/PushBackDoFnTransform.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/PushBackDoFnTransform.java
index 7ed5150..a9b8572 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/PushBackDoFnTransform.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/PushBackDoFnTransform.java
@@ -40,7 +40,7 @@
 /**
  * DoFn transform implementation with push backs for side inputs.
  *
- * @param <InputT> input type.
+ * @param <InputT>  input type.
  * @param <OutputT> output type.
  */
 public final class PushBackDoFnTransform<InputT, OutputT> extends AbstractDoFnTransform<InputT, InputT, OutputT> {
@@ -53,15 +53,16 @@
 
   /**
    * PushBackDoFnTransform Constructor.
-   * @param doFn doFn
-   * @param inputCoder input coder
-   * @param outputCoders output coders
-   * @param mainOutputTag main output tag
+   *
+   * @param doFn                 doFn
+   * @param inputCoder           input coder
+   * @param outputCoders         output coders
+   * @param mainOutputTag        main output tag
    * @param additionalOutputTags additional output tags
-   * @param windowingStrategy windowing strategy
-   * @param sideInputs side inputs
-   * @param options pipeline options
-   * @param displayData display data.
+   * @param windowingStrategy    windowing strategy
+   * @param sideInputs           side inputs
+   * @param options              pipeline options
+   * @param displayData          display data.
    */
   public PushBackDoFnTransform(final DoFn<InputT, OutputT> doFn,
                                final Coder<InputT> inputCoder,
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/SideInputTransform.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/SideInputTransform.java
index 0ddb631..4758066 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/SideInputTransform.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/SideInputTransform.java
@@ -27,6 +27,7 @@
 /**
  * Side input transform implementation.
  * TODO #297: Consider Removing SideInputTransform
+ *
  * @param <T> input/output type.
  */
 public final class SideInputTransform<T> implements Transform<WindowedValue<T>, WindowedValue<SideInputElement<T>>> {
@@ -35,6 +36,7 @@
 
   /**
    * Constructor.
+   *
    * @param index side input index.
    */
   public SideInputTransform(final int index) {
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/WindowFnTransform.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/WindowFnTransform.java
index a3dd2e1..14ea5ae 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/WindowFnTransform.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/WindowFnTransform.java
@@ -22,10 +22,10 @@
 import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
+import org.apache.beam.sdk.transforms.windowing.WindowFn;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.nemo.common.ir.OutputCollector;
 import org.apache.nemo.common.ir.vertex.transform.Transform;
-import org.apache.beam.sdk.transforms.windowing.WindowFn;
 import org.apache.nemo.common.punctuation.Watermark;
 import org.joda.time.Instant;
 
@@ -35,6 +35,7 @@
  * Windowing transform implementation.
  * This transform simply windows the given elements into
  * finite windows according to a user-specified WindowFnTransform.
+ *
  * @param <T> input/output type.
  * @param <W> window type
  */
@@ -46,7 +47,8 @@
 
   /**
    * Default Constructor.
-   * @param windowFn windowFn for the Transform.
+   *
+   * @param windowFn    windowFn for the Transform.
    * @param displayData display data.
    */
   public WindowFnTransform(final WindowFn windowFn, final DisplayData displayData) {
diff --git a/compiler/frontend/spark/pom.xml b/compiler/frontend/spark/pom.xml
index eb2c434..a40468a 100644
--- a/compiler/frontend/spark/pom.xml
+++ b/compiler/frontend/spark/pom.xml
@@ -17,121 +17,122 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-compiler</artifactId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../../</relativePath>
-    </parent>
+  <parent>
+    <groupId>org.apache.nemo</groupId>
+    <artifactId>nemo-compiler</artifactId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../../</relativePath>
+  </parent>
 
-    <artifactId>nemo-compiler-frontend-spark</artifactId>
-    <name>Nemo Compiler Frontend: Spark</name>
+  <artifactId>nemo-compiler-frontend-spark</artifactId>
+  <name>Nemo Compiler Frontend: Spark</name>
 
-    <dependencies>
-	    <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-client</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-core_2.11</artifactId>
-            <version>${spark.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.hadoop</groupId>
-                    <artifactId>hadoop-client</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-            <version>${hadoop.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-sql_2.11</artifactId>
-            <version>${spark.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-            <version>${scala.version}</version>
-        </dependency>
-    </dependencies>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-client</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-core_2.11</artifactId>
+      <version>${spark.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-sql_2.11</artifactId>
+      <version>${spark.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-library</artifactId>
+      <version>${scala.version}</version>
+    </dependency>
+  </dependencies>
 
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>net.alchim31.maven</groupId>
-                <artifactId>scala-maven-plugin</artifactId>
-                <version>3.3.1</version>
-                <configuration>
-                    <recompileMode>incremental</recompileMode>
-                    <javacArgs>
-                        <javacArg>-Xlint:unchecked</javacArg>
-                        <javacArg>-Xlint:deprecation</javacArg>
-                    </javacArgs>
-                </configuration>
-                <executions>
-                    <execution>
-                        <id>scala-compile-first</id>
-                        <phase>process-resources</phase>
-                        <goals>
-                            <goal>add-source</goal>
-                            <goal>compile</goal>
-                        </goals>
-                    </execution>
-                    <execution>
-                        <id>scala-test-compile</id>
-                        <phase>process-test-resources</phase>
-                        <goals>
-                            <goal>testCompile</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <version>2.0.2</version>
-                <executions>
-                    <execution>
-                        <phase>compile</phase>
-                        <goals>
-                            <goal>compile</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-javadoc-plugin</artifactId>
-                <version>3.0.0</version>
-                <configuration>
-                    <outputDirectory>docs/apidocs</outputDirectory>
-                    <reportOutputDirectory>docs/apidocs</reportOutputDirectory>
-                    <doclint>-missing</doclint>
-                </configuration>
-                <executions>
-                    <execution>
-                        <id>aggregate</id>
-                        <goals>
-                            <goal>aggregate</goal>
-                        </goals>
-                        <phase>site</phase>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>net.alchim31.maven</groupId>
+        <artifactId>scala-maven-plugin</artifactId>
+        <version>3.3.1</version>
+        <configuration>
+          <recompileMode>incremental</recompileMode>
+          <javacArgs>
+            <javacArg>-Xlint:unchecked</javacArg>
+            <javacArg>-Xlint:deprecation</javacArg>
+          </javacArgs>
+        </configuration>
+        <executions>
+          <execution>
+            <id>scala-compile-first</id>
+            <phase>process-resources</phase>
+            <goals>
+              <goal>add-source</goal>
+              <goal>compile</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>scala-test-compile</id>
+            <phase>process-test-resources</phase>
+            <goals>
+              <goal>testCompile</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <executions>
+          <execution>
+            <phase>compile</phase>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-javadoc-plugin</artifactId>
+        <version>3.0.0</version>
+        <configuration>
+          <outputDirectory>docs/apidocs</outputDirectory>
+          <reportOutputDirectory>docs/apidocs</reportOutputDirectory>
+          <doclint>-missing</doclint>
+        </configuration>
+        <executions>
+          <execution>
+            <id>aggregate</id>
+            <goals>
+              <goal>aggregate</goal>
+            </goals>
+            <phase>site</phase>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
 </project>
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/coder/SparkDecoderFactory.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/coder/SparkDecoderFactory.java
index cec71d2..9e9e8b4 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/coder/SparkDecoderFactory.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/coder/SparkDecoderFactory.java
@@ -28,6 +28,7 @@
 
 /**
  * Spark DecoderFactory for serialization.
+ *
  * @param <T> type of the object to deserialize.
  */
 public final class SparkDecoderFactory<T> implements DecoderFactory<T> {
@@ -56,6 +57,7 @@
 
   /**
    * SparkDecoder.
+   *
    * @param <T2> type of the object to deserialize.
    */
   private final class SparkDecoder<T2> implements Decoder<T2> {
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/coder/SparkEncoderFactory.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/coder/SparkEncoderFactory.java
index a9d6d59..bb1174b 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/coder/SparkEncoderFactory.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/coder/SparkEncoderFactory.java
@@ -28,6 +28,7 @@
 
 /**
  * Spark EncoderFactory for serialization.
+ *
  * @param <T> type of the object to serialize.
  */
 public final class SparkEncoderFactory<T> implements EncoderFactory<T> {
@@ -56,6 +57,7 @@
 
   /**
    * SparkEncoder.
+   *
    * @param <T2> type of the object to serialize.
    */
   private final class SparkEncoder<T2> implements Encoder<T2> {
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/JavaSparkContext.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/JavaSparkContext.java
index c667041..d4cd2a7 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/JavaSparkContext.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/JavaSparkContext.java
@@ -31,6 +31,7 @@
 
   /**
    * Constructor.
+   *
    * @param sparkContext spark context to wrap.
    */
   public JavaSparkContext(final SparkContext sparkContext) {
@@ -62,7 +63,7 @@
    * Initiate a JavaRDD with the number of parallelism.
    *
    * @param list input data as list.
-   * @param <T> type of the initial element.
+   * @param <T>  type of the initial element.
    * @return the newly initiated JavaRDD.
    */
   public <T> JavaRDD<T> parallelize(final List<T> list) {
@@ -72,9 +73,9 @@
   /**
    * Initiate a JavaRDD with the number of parallelism.
    *
-   * @param l input data as list.
+   * @param l      input data as list.
    * @param slices number of slices (parallelism).
-   * @param <T> type of the initial element.
+   * @param <T>    type of the initial element.
    * @return the newly initiated JavaRDD.
    */
   public <T> JavaRDD<T> parallelize(final List<T> l, final int slices) {
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/SparkBroadcast.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/SparkBroadcast.java
index c344b5b..e267873 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/SparkBroadcast.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/SparkBroadcast.java
@@ -29,7 +29,8 @@
 
   /**
    * Constructor.
-   * @param tag broadcast id.
+   *
+   * @param tag       broadcast id.
    * @param classType class type.
    */
   SparkBroadcast(final long tag, final Class<T> classType) {
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/SparkContext.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/SparkContext.java
index 5d69470..c12b3b0 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/SparkContext.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/SparkContext.java
@@ -57,9 +57,9 @@
   /**
    * Initiate a JavaRDD with the number of parallelism.
    *
-   * @param seq        input data as list.
-   * @param numSlices  number of slices (parallelism).
-   * @param evidence   type of the initial element.
+   * @param seq       input data as list.
+   * @param numSlices number of slices (parallelism).
+   * @param evidence  type of the initial element.
    * @return the newly initiated JavaRDD.
    */
   @Override
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/SparkFrontendUtils.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/SparkFrontendUtils.java
index 680a29b..2f15d54 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/SparkFrontendUtils.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/SparkFrontendUtils.java
@@ -41,7 +41,10 @@
 import org.apache.spark.api.java.function.Function;
 import org.apache.spark.api.java.function.Function2;
 import org.apache.spark.api.java.function.PairFunction;
-import org.apache.spark.serializer.*;
+import org.apache.spark.serializer.JavaSerializer;
+import org.apache.spark.serializer.KryoSerializer;
+import org.apache.spark.serializer.Serializer;
+import org.apache.spark.serializer.SerializerInstance;
 import scala.Function1;
 import scala.Tuple2;
 import scala.collection.JavaConverters;
@@ -50,7 +53,9 @@
 import scala.reflect.ClassTag$;
 
 import java.nio.ByteBuffer;
-import java.util.*;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Stack;
 
 /**
  * Utility class for RDDs.
@@ -72,7 +77,7 @@
    */
   public static Serializer deriveSerializerFrom(final org.apache.spark.SparkContext sparkContext) {
     if (sparkContext.conf().get("spark.serializer", "")
-        .equals("org.apache.spark.serializer.KryoSerializer")) {
+      .equals("org.apache.spark.serializer.KryoSerializer")) {
       return new KryoSerializer(sparkContext.conf());
     } else {
       return new JavaSerializer(sparkContext.conf());
@@ -99,7 +104,7 @@
     builder.addVertex(collectVertex, loopVertexStack);
 
     final IREdge newEdge = new IREdge(getEdgeCommunicationPattern(lastVertex, collectVertex),
-        lastVertex, collectVertex);
+      lastVertex, collectVertex);
     newEdge.setProperty(EncoderProperty.of(new SparkEncoderFactory(serializer)));
     newEdge.setProperty(DecoderProperty.of(new SparkDecoderFactory(serializer)));
     newEdge.setProperty(SPARK_KEY_EXTRACTOR_PROP);
@@ -121,8 +126,8 @@
   public static CommunicationPatternProperty.Value getEdgeCommunicationPattern(final IRVertex src,
                                                                                final IRVertex dst) {
     if (dst instanceof OperatorVertex
-        && (((OperatorVertex) dst).getTransform() instanceof ReduceByKeyTransform
-        || ((OperatorVertex) dst).getTransform() instanceof GroupByKeyTransform)) {
+      && (((OperatorVertex) dst).getTransform() instanceof ReduceByKeyTransform
+      || ((OperatorVertex) dst).getTransform() instanceof GroupByKeyTransform)) {
       return CommunicationPatternProperty.Value.Shuffle;
     } else {
       return CommunicationPatternProperty.Value.OneToOne;
@@ -131,7 +136,7 @@
 
   /**
    * Converts a {@link Function1} to a corresponding {@link Function}.
-   *
+   * <p>
    * Here, we use the Spark 'JavaSerializer' to facilitate debugging in the future.
    * TODO #205: RDD Closure with Broadcast Variables Serialization Bug
    *
@@ -189,7 +194,7 @@
    * @return the converted Java function.
    */
   public static <I, O> FlatMapFunction<I, O> toJavaFlatMapFunction(
-      final Function1<I, TraversableOnce<O>> scalaFunction) {
+    final Function1<I, TraversableOnce<O>> scalaFunction) {
     return new FlatMapFunction<I, O>() {
       @Override
       public Iterator<O> call(final I i) throws Exception {
@@ -208,7 +213,7 @@
    * @return the converted map function.
    */
   public static <T, K, V> Function<T, Tuple2<K, V>> pairFunctionToPlainFunction(
-      final PairFunction<T, K, V> pairFunction) {
+    final PairFunction<T, K, V> pairFunction) {
     return new Function<T, Tuple2<K, V>>() {
       @Override
       public Tuple2<K, V> call(final T elem) throws Exception {
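
Besides the continuation-indent changes, this file expands the wildcard imports `org.apache.spark.serializer.*` and `java.util.*` into explicit ones. A dependency-free sketch of the explicit style, with JDK classes standing in for the Spark ones:

```java
// The three imports below are exactly the classes this file uses -- the
// property the hunk restores by expanding java.util.* in SparkFrontendUtils.
import java.util.Iterator;
import java.util.List;
import java.util.Stack;

public final class ExplicitImportExample {
  public static void main(final String[] args) {
    final List<String> expanded =
      List.of("java.util.Iterator", "java.util.List", "java.util.Stack");
    final Stack<String> stack = new Stack<>();
    stack.addAll(expanded);
    final Iterator<String> it = stack.iterator();
    while (it.hasNext()) {
      System.out.println(it.next());
    }
  }

  private ExplicitImportExample() {
  }
}
```

Explicit imports make a file's dependencies visible at a glance and avoid accidental name capture when a new class is later added to a wildcarded package.
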
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/rdd/JavaPairRDD.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/rdd/JavaPairRDD.java
index 4ef5999..1c85965 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/rdd/JavaPairRDD.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/rdd/JavaPairRDD.java
@@ -39,6 +39,7 @@
 
 /**
  * Java RDD for pairs.
+ *
  * @param <K> key type.
  * @param <V> value type.
  */
@@ -95,7 +96,7 @@
   public JavaPairRDD<K, V> reduceByKey(final Function2<V, V, V> func) {
     // Explicit conversion
     final PairRDDFunctions<K, V> pairRdd = RDD.rddToPairRDDFunctions(
-        rdd, ClassTag$.MODULE$.apply(Object.class), ClassTag$.MODULE$.apply(Object.class), null);
+      rdd, ClassTag$.MODULE$.apply(Object.class), ClassTag$.MODULE$.apply(Object.class), null);
     final RDD<Tuple2<K, V>> reducedRdd = pairRdd.reduceByKey(func);
     return JavaPairRDD.fromRDD(reducedRdd);
   }
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/rdd/JavaRDD.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/rdd/JavaRDD.java
index 18302da..e8b8f12 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/rdd/JavaRDD.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/core/rdd/JavaRDD.java
@@ -29,7 +29,10 @@
 import org.apache.nemo.compiler.frontend.spark.source.SparkTextFileBoundedSourceVertex;
 import org.apache.nemo.compiler.frontend.spark.sql.Dataset;
 import org.apache.nemo.compiler.frontend.spark.sql.SparkSession;
-import org.apache.spark.*;
+import org.apache.spark.Partition;
+import org.apache.spark.Partitioner;
+import org.apache.spark.SparkContext;
+import org.apache.spark.TaskContext;
 import org.apache.spark.api.java.JavaFutureAction;
 import org.apache.spark.api.java.Optional;
 import org.apache.spark.api.java.function.*;
@@ -40,11 +43,15 @@
 import scala.Tuple2;
 import scala.reflect.ClassTag$;
 
-import java.util.*;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
 
 /**
  * Java RDD.
+ *
  * @param <T> type of the final element.
  */
 public final class JavaRDD<T> extends org.apache.spark.api.java.JavaRDD<T> {
@@ -70,7 +77,7 @@
     builder.addVertex(initializedSourceVertex);
 
     final RDD<T> nemoRdd = new RDD<>(sparkContext, builder.buildWithoutSourceSinkCheck(),
-        initializedSourceVertex, Option.empty(), ClassTag$.MODULE$.apply(Object.class));
+      initializedSourceVertex, Option.empty(), ClassTag$.MODULE$.apply(Object.class));
 
     return new JavaRDD<>(nemoRdd);
   }
@@ -115,7 +122,7 @@
     builder.addVertex(sparkBoundedSourceVertex);
 
     return new JavaRDD<>(
-        sparkRDD, sparkSession.sparkContext(), builder.buildWithoutSourceSinkCheck(), sparkBoundedSourceVertex);
+      sparkRDD, sparkSession.sparkContext(), builder.buildWithoutSourceSinkCheck(), sparkBoundedSourceVertex);
   }
 
   /**
@@ -203,7 +210,7 @@
   @Override
   public <K2, V2> JavaPairRDD<K2, V2> mapToPair(final PairFunction<T, K2, V2> f) {
     final RDD<Tuple2<K2, V2>> pairRdd =
-        rdd.map(SparkFrontendUtils.pairFunctionToPlainFunction(f), ClassTag$.MODULE$.apply(Object.class));
+      rdd.map(SparkFrontendUtils.pairFunctionToPlainFunction(f), ClassTag$.MODULE$.apply(Object.class));
     return JavaPairRDD.fromRDD(pairRdd);
   }
 
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/source/SparkDatasetBoundedSourceVertex.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/source/SparkDatasetBoundedSourceVertex.java
index d7b3794..db74eca 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/source/SparkDatasetBoundedSourceVertex.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/source/SparkDatasetBoundedSourceVertex.java
@@ -23,7 +23,8 @@
 import org.apache.nemo.common.ir.vertex.SourceVertex;
 import org.apache.nemo.compiler.frontend.spark.sql.Dataset;
 import org.apache.nemo.compiler.frontend.spark.sql.SparkSession;
-import org.apache.spark.*;
+import org.apache.spark.Partition;
+import org.apache.spark.TaskContext$;
 import org.apache.spark.rdd.RDD;
 import scala.collection.JavaConverters;
 
@@ -33,6 +34,7 @@
 
 /**
  * Bounded source vertex for Spark Dataset.
+ *
  * @param <T> type of data to read.
  */
 public final class SparkDatasetBoundedSourceVertex<T> extends SourceVertex<T> {
@@ -51,10 +53,10 @@
     final Partition[] partitions = rdd.getPartitions();
     for (int i = 0; i < partitions.length; i++) {
       readables.add(new SparkDatasetBoundedSourceReadable(
-          partitions[i],
-          sparkSession.getDatasetCommandsList(),
-          sparkSession.getInitialConf(),
-          i));
+        partitions[i],
+        sparkSession.getDatasetCommandsList(),
+        sparkSession.getInitialConf(),
+        i));
     }
     this.estimatedByteSize = dataset.javaRDD()
       .map(o -> (long) o.toString().getBytes("UTF-8").length)
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/source/SparkTextFileBoundedSourceVertex.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/source/SparkTextFileBoundedSourceVertex.java
index 5679c67..500162c 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/source/SparkTextFileBoundedSourceVertex.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/source/SparkTextFileBoundedSourceVertex.java
@@ -21,13 +21,18 @@
 import org.apache.nemo.common.ir.BoundedIteratorReadable;
 import org.apache.nemo.common.ir.Readable;
 import org.apache.nemo.common.ir.vertex.SourceVertex;
-import org.apache.spark.*;
+import org.apache.spark.Partition;
+import org.apache.spark.SparkConf;
+import org.apache.spark.SparkContext;
+import org.apache.spark.TaskContext$;
 import org.apache.spark.rdd.RDD;
 import org.apache.spark.util.SizeEstimator;
 import scala.collection.JavaConverters;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
 
 /**
  * Bounded source vertex for Spark text file.
@@ -50,11 +55,11 @@
     final Partition[] partitions = sparkContext.textFile(inputPath, numPartitions).getPartitions();
     for (int i = 0; i < partitions.length; i++) {
       readables.add(new SparkTextFileBoundedSourceReadable(
-          partitions[i],
-          sparkContext.getConf(),
-          i,
-          inputPath,
-          numPartitions));
+        partitions[i],
+        sparkContext.getConf(),
+        i,
+        inputPath,
+        numPartitions));
     }
     this.estimatedSizeBytes = SizeEstimator.estimate(sparkContext.textFile(inputPath, numPartitions));
   }
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/DataFrameReader.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/DataFrameReader.java
index ff9bdbc..5de3da8 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/DataFrameReader.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/DataFrameReader.java
@@ -31,6 +31,7 @@
 
   /**
    * Constructor.
+   *
    * @param sparkSession spark session.
    */
   DataFrameReader(final SparkSession sparkSession) {
@@ -103,9 +104,9 @@
                            final long lowerBound, final long upperBound, final int numPartitions,
                            final java.util.Properties connectionProperties) {
     final boolean userTriggered = initializeFunction(
-        url, table, columnName, lowerBound, upperBound, numPartitions, connectionProperties);
+      url, table, columnName, lowerBound, upperBound, numPartitions, connectionProperties);
     final Dataset<Row> result = Dataset.from(super.jdbc(
-        url, table, columnName, lowerBound, upperBound, numPartitions, connectionProperties));
+      url, table, columnName, lowerBound, upperBound, numPartitions, connectionProperties));
     this.setIsUserTriggered(userTriggered);
     return result;
   }
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/Dataset.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/Dataset.java
index 59ab5ea..7eb999e 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/Dataset.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/Dataset.java
@@ -33,6 +33,7 @@
 
 /**
  * A dataset component: it represents relational data.
+ *
  * @param <T> type of the data.
  */
 public final class Dataset<T> extends org.apache.spark.sql.Dataset<T> implements NemoSparkUserFacingClass {
@@ -90,18 +91,18 @@
   /**
    * Create a {@link RDD} component from this data set.
    * To transparently give our RDD to user programs, this method has to be overridden.
-   *
+   * <p>
    * By overriding this method, if a method (such as reduce) of super ({@link org.apache.spark.sql.Dataset}) is called
    * and it uses super's rdd, the rdd will be our rdd returned by this method.
    * This is an intended behavior and the result will be calculated by our system.
    *
    * @return the new RDD component.
    */
-   @Override
-   public RDD<T> rdd() {
-     final JavaRDD<T> javaRDD = JavaRDD.of((SparkSession) super.sparkSession(), this);
-     return javaRDD.rdd();
-   }
+  @Override
+  public RDD<T> rdd() {
+    final JavaRDD<T> javaRDD = JavaRDD.of((SparkSession) super.sparkSession(), this);
+    return javaRDD.rdd();
+  }
 
   @Override
   public Dataset<Row> agg(final Column expr, final Column... exprs) {
@@ -444,8 +445,8 @@
 
   @Override
   public <U> Dataset<U> mapPartitions(
-      final scala.Function1<scala.collection.Iterator<T>, scala.collection.Iterator<U>> func,
-      final Encoder<U> evidence) {
+    final scala.Function1<scala.collection.Iterator<T>, scala.collection.Iterator<U>> func,
+    final Encoder<U> evidence) {
     final boolean userTriggered = initializeFunction(func, evidence);
     final Dataset<U> result = from(super.mapPartitions(func, evidence));
     this.setIsUserTriggered(userTriggered);
@@ -536,10 +537,10 @@
     return result;
   }
 
-//  @Override
-//  public java.util.List<Dataset<T>> randomSplitAsList(double[] weights, long seed) {
-//    return super.randomSplitAsList(weights, seed).stream().map(ds -> from(ds)).collect(Collectors.toList());
-//  }
+  //  @Override
+  //  public java.util.List<Dataset<T>> randomSplitAsList(double[] weights, long seed) {
+  //    return super.randomSplitAsList(weights, seed).stream().map(ds -> from(ds)).collect(Collectors.toList());
+  //  }
 
   @Override
   public Dataset<T> repartition(final Column... partitionExprs) {
@@ -756,7 +757,7 @@
 
   @Override
   public <U> Dataset<U> transform(
-      final scala.Function1<org.apache.spark.sql.Dataset<T>, org.apache.spark.sql.Dataset<U>> t) {
+    final scala.Function1<org.apache.spark.sql.Dataset<T>, org.apache.spark.sql.Dataset<U>> t) {
     final boolean userTriggered = initializeFunction(t);
     final Dataset<U> result = from(super.transform(t));
     this.setIsUserTriggered(userTriggered);
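
Most of the churn in Dataset.java follows one rule: wrapped parameter lists and statement continuations move from a four-space to a two-space continuation indent, as in the reformatted mapPartitions and transform signatures above. A self-contained sketch (hypothetical class, not from the patch) of the resulting layout:

```java
public final class ContinuationIndentExample {

  // Wrapped parameter lists continue at +2 spaces, not +4.
  private static String concatenate(
    final String first,
    final String second) {
    return first + second;
  }

  public static void main(final String[] args) {
    // Wrapped call arguments also align at a two-space continuation.
    System.out.println(concatenate(
      "hello, ",
      "nemo"));
  }
}
```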
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/NemoSparkUserFacingClass.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/NemoSparkUserFacingClass.java
index fe0fd1a..8cf3b82 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/NemoSparkUserFacingClass.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/NemoSparkUserFacingClass.java
@@ -32,6 +32,7 @@
 
   /**
    * Set the userTriggered flag.
+   *
    * @param bool boolean to set the flag to.
    */
   default void setIsUserTriggered(boolean bool) {
@@ -45,6 +46,7 @@
 
   /**
    * A method to distinguish user-called functions from internal calls.
+   *
    * @param args arguments of the method
    * @return whether or not this function has been called by the user.
    */
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/SparkSession.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/SparkSession.java
index 17aeecd..9c6fbec 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/SparkSession.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/sql/SparkSession.java
@@ -18,10 +18,10 @@
  */
 package org.apache.nemo.compiler.frontend.spark.sql;
 
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.nemo.client.JobLauncher;
 import org.apache.nemo.compiler.frontend.spark.core.SparkContext;
 import org.apache.nemo.conf.JobConf;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.reef.tang.Configuration;
 import org.apache.reef.tang.Injector;
 import org.apache.reef.tang.Tang;
@@ -115,7 +115,7 @@
    */
   public static <T> Dataset<T> initializeDataset(final SparkSession spark,
                                                  final LinkedHashMap<String, Object[]> commandList)
-      throws OperationNotSupportedException {
+    throws OperationNotSupportedException {
     Object result = spark;
 
     for (Map.Entry<String, Object[]> command : commandList.entrySet()) {
@@ -126,8 +126,8 @@
       final Class<?>[] argTypes = Stream.of(args).map(Object::getClass).toArray(Class[]::new);
 
       if (!className.contains("SparkSession")
-          && !className.contains("DataFrameReader")
-          && !className.contains("Dataset")) {
+        && !className.contains("DataFrameReader")
+        && !className.contains("Dataset")) {
         throw new OperationNotSupportedException(command + " is not yet supported.");
       }
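
The SparkSession.java hunk above applies the same rule to wrapped boolean conditions: each continuation line starts at a two-space indent, with the logical operator leading the line. A hedged, self-contained sketch (the method and test string are illustrative):

```java
public final class WrappedConditionExample {

  // Continuation lines of a multi-line condition indent two spaces,
  // with && or || starting each wrapped line.
  static boolean isSupported(final String className) {
    return className.contains("SparkSession")
      || className.contains("DataFrameReader")
      || className.contains("Dataset");
  }

  public static void main(final String[] args) {
    System.out.println(isSupported("org.apache.nemo.compiler.frontend.spark.sql.Dataset"));
  }
}
```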
 
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/CollectTransform.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/CollectTransform.java
index 7f5aad6..08d8af6 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/CollectTransform.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/CollectTransform.java
@@ -18,9 +18,9 @@
  */
 package org.apache.nemo.compiler.frontend.spark.transform;
 
+import org.apache.commons.lang3.SerializationUtils;
 import org.apache.nemo.common.ir.OutputCollector;
 import org.apache.nemo.common.ir.vertex.transform.NoWatermarkEmitTransform;
-import org.apache.commons.lang3.SerializationUtils;
 
 import java.util.ArrayList;
 import java.util.Base64;
@@ -28,6 +28,7 @@
 /**
  * Collect transform.
  * This transform is used for Spark batch jobs, so it does not emit watermarks.
+ *
  * @param <T> type of data to collect.
  */
 public final class CollectTransform<T> extends NoWatermarkEmitTransform<T, T> {
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/FlatMapTransform.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/FlatMapTransform.java
index aca41a0..a0ded0e 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/FlatMapTransform.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/FlatMapTransform.java
@@ -25,6 +25,7 @@
 
 /**
  * Flatmap Transform that flattens each output element after mapping each element to an iterator.
+ *
  * @param <T> input type.
  * @param <U> output type.
  */
@@ -34,6 +35,7 @@
 
   /**
    * Constructor.
+   *
    * @param func flat map function.
    */
   public FlatMapTransform(final FlatMapFunction<T, U> func) {
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/GroupByKeyTransform.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/GroupByKeyTransform.java
index 7d39aaf..3c36ba6 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/GroupByKeyTransform.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/GroupByKeyTransform.java
@@ -23,10 +23,14 @@
 import org.apache.nemo.common.ir.vertex.transform.Transform;
 import scala.Tuple2;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 /**
  * Transform for group by key transformation.
+ *
  * @param <K> key type.
  * @param <V> value type.
  */
@@ -58,7 +62,7 @@
   @Override
   public void close() {
     keyToValues.entrySet().stream().map(entry -> new Tuple2<>(entry.getKey(), (Iterable<V>) entry.getValue()))
-        .forEach(outputCollector::emit);
+      .forEach(outputCollector::emit);
     keyToValues.clear();
   }
 }
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/HDFSTextFileTransform.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/HDFSTextFileTransform.java
index cf5f14d..6822498 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/HDFSTextFileTransform.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/HDFSTextFileTransform.java
@@ -18,13 +18,13 @@
  */
 package org.apache.nemo.compiler.frontend.spark.transform;
 
-import org.apache.nemo.common.ir.OutputCollector;
-import org.apache.nemo.common.ir.vertex.transform.NoWatermarkEmitTransform;
-import org.apache.nemo.common.ir.vertex.transform.Transform;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.nemo.common.ir.OutputCollector;
+import org.apache.nemo.common.ir.vertex.transform.NoWatermarkEmitTransform;
+import org.apache.nemo.common.ir.vertex.transform.Transform;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -33,6 +33,7 @@
 
 /**
  * Transform which saves elements to an HDFS text file for Spark.
+ *
  * @param <I> input type.
  */
 public final class HDFSTextFileTransform<I> extends NoWatermarkEmitTransform<I, String> {
@@ -63,8 +64,8 @@
   @Override
   public void close() {
     try (
-        final FileSystem fileSystem = fileName.getFileSystem(new JobConf());
-        final FSDataOutputStream outputStream = fileSystem.create(fileName, false);
+      final FileSystem fileSystem = fileName.getFileSystem(new JobConf());
+      final FSDataOutputStream outputStream = fileSystem.create(fileName, false);
     ) {
       for (final I element : elements) {
         outputStream.writeBytes(element + "\n");
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/LocalTextFileTransform.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/LocalTextFileTransform.java
index 8f6cc8b..1de8151 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/LocalTextFileTransform.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/LocalTextFileTransform.java
@@ -28,6 +28,7 @@
 
 /**
  * Transform which saves elements to a local text file for Spark.
+ *
  * @param <I> input type.
  */
 public final class LocalTextFileTransform<I> extends NoWatermarkEmitTransform<I, String> {
@@ -58,8 +59,8 @@
   @Override
   public void close() {
     try (
-        final Writer writer =
-            new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName, false), "utf-8"))
+      final Writer writer =
+        new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName, false), "utf-8"))
     ) {
       for (final I element : elements) {
         writer.write(element + "\n");
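
Both text-file transforms above (HDFSTextFileTransform and LocalTextFileTransform) reformat their try-with-resources headers the same way: resource declarations sit at a two-space continuation indent inside `try (`, and nested initializers continue at a further two spaces. A minimal, runnable sketch under that convention (the output file name is hypothetical):

```java
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public final class TryWithResourcesExample {
  public static void main(final String[] args) throws IOException {
    try (
      final BufferedWriter writer =
        new BufferedWriter(new FileWriter("example-output.txt", false))
    ) {
      writer.write("example\n");
    }
  }
}
```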
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/MapToPairTransform.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/MapToPairTransform.java
index 32f3d12..4745574 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/MapToPairTransform.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/MapToPairTransform.java
@@ -26,6 +26,7 @@
 
 /**
  * Map elements to Pair elements.
+ *
  * @param <T> input type.
  * @param <K> output key type.
  * @param <V> output value type.
@@ -36,6 +37,7 @@
 
   /**
    * Constructor.
+   *
    * @param func Pair function to apply to each element.
    */
   public MapToPairTransform(final PairFunction<T, K, V> func) {
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/MapTransform.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/MapTransform.java
index 0774b2c..177fc5c 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/MapTransform.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/MapTransform.java
@@ -25,6 +25,7 @@
 
 /**
  * Map Transform for Spark.
+ *
  * @param <I> input type.
  * @param <O> output type.
  */
@@ -34,6 +35,7 @@
 
   /**
    * Constructor.
+   *
    * @param func the function to run map with.
    */
   public MapTransform(final Function<I, O> func) {
@@ -47,11 +49,11 @@
 
   @Override
   public void onData(final I element) {
-      try {
-        outputCollector.emit(func.call(element));
-      } catch (Exception e) {
-        throw new RuntimeException(e);
-      }
+    try {
+      outputCollector.emit(func.call(element));
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
   }
 
   @Override
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/ReduceByKeyTransform.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/ReduceByKeyTransform.java
index bea19d3..9a66d72 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/ReduceByKeyTransform.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/ReduceByKeyTransform.java
@@ -25,10 +25,14 @@
 import org.slf4j.LoggerFactory;
 import scala.Tuple2;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 /**
  * Transform for reduce by key transformation.
+ *
  * @param <K> key type.
  * @param <V> value type.
  */
@@ -41,6 +45,7 @@
 
   /**
    * Constructor.
+   *
    * @param func reduce function.
    */
   public ReduceByKeyTransform(final Function2<V, V, V> func) {
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/ReduceTransform.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/ReduceTransform.java
index 2141017..6dbe172 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/ReduceTransform.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/ReduceTransform.java
@@ -38,6 +38,7 @@
 
   /**
    * Constructor.
+   *
    * @param func function to run for the reduce transform.
    */
   public ReduceTransform(final Function2<T, T, T> func) {
@@ -76,9 +77,10 @@
 
   /**
    * Reduce the iterator elements into a single object.
+   *
    * @param elements the iterator of elements.
-   * @param func function to apply for reduction.
-   * @param <T> type of the elements.
+   * @param func     function to apply for reduction.
+   * @param <T>      type of the elements.
    * @return the reduced element.
    */
   @Nullable
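
The Javadoc churn across these files follows two rules, visible in the ReduceTransform hunk just above: a blank `*` line now separates the description from the tag block, and `@param` descriptions are column-aligned. A short sketch (the method is a hypothetical example, not from the patch):

```java
public final class JavadocStyleExample {

  /**
   * Adds two operands (hypothetical example method).
   *
   * @param left  the first operand.
   * @param right the second operand.
   * @return the sum of the operands.
   */
  static int add(final int left, final int right) {
    return left + right;
  }
}
```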
diff --git a/compiler/optimizer/pom.xml b/compiler/optimizer/pom.xml
index 44dca09..fb09507 100644
--- a/compiler/optimizer/pom.xml
+++ b/compiler/optimizer/pom.xml
@@ -17,44 +17,45 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-compiler</artifactId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../</relativePath>
-    </parent>
+  <parent>
+    <groupId>org.apache.nemo</groupId>
+    <artifactId>nemo-compiler</artifactId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../</relativePath>
+  </parent>
 
-    <artifactId>nemo-compiler-optimizer</artifactId>
-    <name>Nemo Compiler Optimizer</name>
+  <artifactId>nemo-compiler-optimizer</artifactId>
+  <name>Nemo Compiler Optimizer</name>
 
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-math3</artifactId>
-            <version>${commons-math.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-      <dependency>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-conf</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-core</artifactId>
-            <version>${jackson.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-            <version>${jackson.version}</version>
-        </dependency>
-    </dependencies>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-math3</artifactId>
+      <version>${commons-math.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-conf</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-core</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+  </dependencies>
 </project>
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/NemoOptimizer.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/NemoOptimizer.java
index 3db9529..1b5ab48 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/NemoOptimizer.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/NemoOptimizer.java
@@ -53,7 +53,7 @@
 
   /**
    * @param dagDirectory to store JSON representation of intermediate DAGs.
-   * @param policyName the name of the optimization policy.
+   * @param policyName   the name of the optimization policy.
    */
   @Inject
   private NemoOptimizer(@Parameter(JobConf.DAGDirectory.class) final String dagDirectory,
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/Optimizer.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/Optimizer.java
index 4f4e142..be5f149 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/Optimizer.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/Optimizer.java
@@ -40,7 +40,7 @@
   /**
    * Optimize the submitted DAG at run time.
    *
-   * @param dag input.
+   * @param dag     input.
    * @param message for optimization.
    * @return optimized DAG.
    */
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/examples/MapReduceDisaggregationOptimization.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/examples/MapReduceDisaggregationOptimization.java
index ec533ad..c7e5f8f 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/examples/MapReduceDisaggregationOptimization.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/examples/MapReduceDisaggregationOptimization.java
@@ -45,6 +45,7 @@
 
   /**
    * Main function of the example MR program.
+   *
    * @param args arguments.
    * @throws Exception Exceptions on the way.
    */
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/CompileTimePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/CompileTimePass.java
index 5f5dbb7..1f77e17 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/CompileTimePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/CompileTimePass.java
@@ -32,6 +32,7 @@
 public abstract class CompileTimePass extends Pass implements Function<IRDAG, IRDAG> {
   /**
    * Getter for prerequisite execution properties.
+   *
    * @return set of prerequisite execution properties.
    */
   public abstract Set<Class<? extends ExecutionProperty>> getPrerequisiteExecutionProperties();
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/AnnotatingPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/AnnotatingPass.java
index e32415a..a33053a 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/AnnotatingPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/AnnotatingPass.java
@@ -36,6 +36,7 @@
 
   /**
    * Constructor.
+   *
    * @param cls the annotating pass class.
    */
   public AnnotatingPass(final Class<? extends AnnotatingPass> cls) {
@@ -44,11 +45,12 @@
 
     final Requires requires = cls.getAnnotation(Requires.class);
     this.prerequisiteExecutionProperties = requires == null
-        ? new HashSet<>() : new HashSet<>(Arrays.asList(requires.value()));
+      ? new HashSet<>() : new HashSet<>(Arrays.asList(requires.value()));
   }
 
   /**
    * Getter for the execution properties to annotate through the pass.
+   *
    * @return key of execution properties to annotate through the pass.
    */
   public final Set<Class<? extends ExecutionProperty>> getExecutionPropertiesToAnnotate() {
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/CompressionPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/CompressionPass.java
index b8c1b18..e4069b7 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/CompressionPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/CompressionPass.java
@@ -39,6 +39,7 @@
 
   /**
    * Constructor.
+   *
    * @param compression Compression to apply on edges.
    */
   public CompressionPass(final CompressionProperty.Value compression) {
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultDataPersistencePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultDataPersistencePass.java
index 1a947f4..17d0f50 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultDataPersistencePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultDataPersistencePass.java
@@ -40,18 +40,18 @@
   @Override
   public IRDAG apply(final IRDAG dag) {
     dag.topologicalDo(irVertex ->
-        dag.getIncomingEdgesOf(irVertex).forEach(irEdge -> {
-          if (!irEdge.getPropertyValue(DataPersistenceProperty.class).isPresent()) {
-            final DataStoreProperty.Value dataStoreValue
-                = irEdge.getPropertyValue(DataStoreProperty.class).get();
-            if (DataStoreProperty.Value.MemoryStore.equals(dataStoreValue)
-                || DataStoreProperty.Value.SerializedMemoryStore.equals(dataStoreValue)) {
-              irEdge.setProperty(DataPersistenceProperty.of(DataPersistenceProperty.Value.Discard));
-            } else {
-              irEdge.setProperty(DataPersistenceProperty.of(DataPersistenceProperty.Value.Keep));
-            }
+      dag.getIncomingEdgesOf(irVertex).forEach(irEdge -> {
+        if (!irEdge.getPropertyValue(DataPersistenceProperty.class).isPresent()) {
+          final DataStoreProperty.Value dataStoreValue
+            = irEdge.getPropertyValue(DataStoreProperty.class).get();
+          if (DataStoreProperty.Value.MemoryStore.equals(dataStoreValue)
+            || DataStoreProperty.Value.SerializedMemoryStore.equals(dataStoreValue)) {
+            irEdge.setProperty(DataPersistenceProperty.of(DataPersistenceProperty.Value.Discard));
+          } else {
+            irEdge.setProperty(DataPersistenceProperty.of(DataPersistenceProperty.Value.Keep));
           }
-        }));
+        }
+      }));
     return dag;
   }
 }
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultDataStorePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultDataStorePass.java
index f8ac7e6..4d179d0 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultDataStorePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultDataStorePass.java
@@ -37,9 +37,9 @@
   public IRDAG apply(final IRDAG dag) {
     dag.getVertices().forEach(vertex -> {
       dag.getIncomingEdgesOf(vertex).stream()
-          .filter(edge -> !edge.getPropertyValue(DataStoreProperty.class).isPresent())
-          .forEach(edge -> edge.setProperty(
-              DataStoreProperty.of(DataStoreProperty.Value.LocalFileStore)));
+        .filter(edge -> !edge.getPropertyValue(DataStoreProperty.class).isPresent())
+        .forEach(edge -> edge.setProperty(
+          DataStoreProperty.of(DataStoreProperty.Value.LocalFileStore)));
     });
     return dag;
   }
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultEdgeDecoderPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultEdgeDecoderPass.java
index e513065..53a877c 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultEdgeDecoderPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultEdgeDecoderPass.java
@@ -29,7 +29,7 @@
 public final class DefaultEdgeDecoderPass extends AnnotatingPass {
 
   private static final DecoderProperty DEFAULT_DECODER_PROPERTY =
-      DecoderProperty.of(DecoderFactory.DUMMY_DECODER_FACTORY);
+    DecoderProperty.of(DecoderFactory.DUMMY_DECODER_FACTORY);
 
   /**
    * Default constructor.
@@ -41,11 +41,11 @@
   @Override
   public IRDAG apply(final IRDAG dag) {
     dag.topologicalDo(irVertex ->
-        dag.getIncomingEdgesOf(irVertex).forEach(irEdge -> {
-          if (!irEdge.getPropertyValue(DecoderProperty.class).isPresent()) {
-            irEdge.setProperty(DEFAULT_DECODER_PROPERTY);
-          }
-        }));
+      dag.getIncomingEdgesOf(irVertex).forEach(irEdge -> {
+        if (!irEdge.getPropertyValue(DecoderProperty.class).isPresent()) {
+          irEdge.setProperty(DEFAULT_DECODER_PROPERTY);
+        }
+      }));
     return dag;
   }
 }
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultEdgeEncoderPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultEdgeEncoderPass.java
index 790391c..030decb 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultEdgeEncoderPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultEdgeEncoderPass.java
@@ -29,7 +29,7 @@
 public final class DefaultEdgeEncoderPass extends AnnotatingPass {
 
   private static final EncoderProperty DEFAULT_DECODER_PROPERTY =
-      EncoderProperty.of(EncoderFactory.DUMMY_ENCODER_FACTORY);
+    EncoderProperty.of(EncoderFactory.DUMMY_ENCODER_FACTORY);
 
   /**
    * Default constructor.
@@ -41,11 +41,11 @@
   @Override
   public IRDAG apply(final IRDAG dag) {
     dag.topologicalDo(irVertex ->
-        dag.getIncomingEdgesOf(irVertex).forEach(irEdge -> {
-          if (!irEdge.getPropertyValue(EncoderProperty.class).isPresent()) {
-            irEdge.setProperty(DEFAULT_DECODER_PROPERTY);
-          }
-        }));
+      dag.getIncomingEdgesOf(irVertex).forEach(irEdge -> {
+        if (!irEdge.getPropertyValue(EncoderProperty.class).isPresent()) {
+          irEdge.setProperty(DEFAULT_DECODER_PROPERTY);
+        }
+      }));
     return dag;
   }
 }
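
One pre-existing quirk worth flagging for a follow-up: DefaultEdgeEncoderPass names its constant DEFAULT_DECODER_PROPERTY even though it holds an EncoderProperty. This PR only reflows whitespace, so the identifier is rightly left untouched; a hypothetical rename (NOT part of this patch, import paths assumed from the surrounding code) would read:

```java
import org.apache.nemo.common.coder.EncoderFactory;
import org.apache.nemo.common.ir.edge.executionproperty.EncoderProperty;

public final class EncoderConstantRenameSketch {
  // Hypothetical rename for a follow-up PR: the field currently named
  // DEFAULT_DECODER_PROPERTY wraps an EncoderProperty, not a decoder.
  private static final EncoderProperty DEFAULT_ENCODER_PROPERTY =
    EncoderProperty.of(EncoderFactory.DUMMY_ENCODER_FACTORY);
}
```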
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultParallelismPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultParallelismPass.java
index 614c53f..0d45249 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultParallelismPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultParallelismPass.java
@@ -74,22 +74,22 @@
           // We manipulate them if it is set as default value of 1.
           if (!originalParallelism.isPresent()) {
             vertex.setProperty(ParallelismProperty.of(
-                sourceVertex.getReadables(desiredSourceParallelism).size()));
+              sourceVertex.getReadables(desiredSourceParallelism).size()));
           }
         } else if (!inEdges.isEmpty()) {
           // No reason to propagate via Broadcast edges, as the data streams that will use the broadcasted data
           // as a sideInput will have their own number of parallelism
           final Integer o2oParallelism = inEdges.stream()
-             .filter(edge -> CommunicationPatternProperty.Value.OneToOne
-                  .equals(edge.getPropertyValue(CommunicationPatternProperty.class).get()))
-              .mapToInt(edge -> edge.getSrc().getPropertyValue(ParallelismProperty.class).get())
-              .max().orElse(1);
+            .filter(edge -> CommunicationPatternProperty.Value.OneToOne
+              .equals(edge.getPropertyValue(CommunicationPatternProperty.class).get()))
+            .mapToInt(edge -> edge.getSrc().getPropertyValue(ParallelismProperty.class).get())
+            .max().orElse(1);
           final Integer shuffleParallelism = inEdges.stream()
-              .filter(edge -> CommunicationPatternProperty.Value.Shuffle
-                  .equals(edge.getPropertyValue(CommunicationPatternProperty.class).get()))
-              .mapToInt(edge -> edge.getSrc().getPropertyValue(ParallelismProperty.class).get())
-              .map(i -> i / shuffleDecreaseFactor)
-              .max().orElse(1);
+            .filter(edge -> CommunicationPatternProperty.Value.Shuffle
+              .equals(edge.getPropertyValue(CommunicationPatternProperty.class).get()))
+            .mapToInt(edge -> edge.getSrc().getPropertyValue(ParallelismProperty.class).get())
+            .map(i -> i / shuffleDecreaseFactor)
+            .max().orElse(1);
           // We set the greater value as the parallelism.
           final Integer parallelism = o2oParallelism > shuffleParallelism ? o2oParallelism : shuffleParallelism;
           vertex.setProperty(ParallelismProperty.of(parallelism));
@@ -97,7 +97,7 @@
           recursivelySynchronizeO2OParallelism(dag, vertex, parallelism);
         } else if (!vertex.getPropertyValue(ParallelismProperty.class).isPresent()) {
           throw new RuntimeException("There is a non-source vertex that doesn't have any inEdges "
-              + "(excluding SideInput edges)");
+            + "(excluding SideInput edges)");
         } // No problem otherwise.
       } catch (Exception e) {
         throw new RuntimeException(e);
@@ -108,8 +108,9 @@
 
   /**
    * Recursively synchronize parallelism for vertices connected by one-to-one edges.
-   * @param dag the original DAG.
-   * @param vertex vertex to observe and update.
+   *
+   * @param dag         the original DAG.
+   * @param vertex      vertex to observe and update.
    * @param parallelism the parallelism of the most recently updated descendant.
    * @return the max value of parallelism among those observed.
    */
@@ -117,11 +118,11 @@
                                                       final Integer parallelism) {
     final List<IREdge> inEdges = dag.getIncomingEdgesOf(vertex);
     final Integer ancestorParallelism = inEdges.stream()
-        .filter(edge -> CommunicationPatternProperty.Value.OneToOne
-            .equals(edge.getPropertyValue(CommunicationPatternProperty.class).get()))
-        .map(IREdge::getSrc)
-        .mapToInt(inVertex -> recursivelySynchronizeO2OParallelism(dag, inVertex, parallelism))
-        .max().orElse(1);
+      .filter(edge -> CommunicationPatternProperty.Value.OneToOne
+        .equals(edge.getPropertyValue(CommunicationPatternProperty.class).get()))
+      .map(IREdge::getSrc)
+      .mapToInt(inVertex -> recursivelySynchronizeO2OParallelism(dag, inVertex, parallelism))
+      .max().orElse(1);
     final Integer maxParallelism = ancestorParallelism > parallelism ? ancestorParallelism : parallelism;
     final Integer myParallelism = vertex.getPropertyValue(ParallelismProperty.class).get();
 
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultScheduleGroupPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultScheduleGroupPass.java
index 29dfca0..e1800d4 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultScheduleGroupPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultScheduleGroupPass.java
@@ -42,24 +42,24 @@
 
 /**
  * A pass for assigning each stage to a schedule group.
- *
+ * <p>
  * TODO #347: IRDAG#partitionAcyclically
  * This code can be greatly simplified...
  *
  * <h3>Rules</h3>
  * <ul>
- *   <li>Vertices connected with push edges must be assigned same ScheduleGroup.</li>
- *   <li>For pull edges,
- *     <ul>
- *       <li>if the destination of the edge depends on multiple ScheduleGroups, split ScheduleGroup by the edge.</li>
- *       <li>if the edge is broadcast type and {@code allowBroadcastWithinScheduleGroup} is {@code false},
- *       split ScheduleGroup by the edge.</li>
- *       <li>if the edge is shuffle type and {@code allowShuffleWithinScheduleGroup} is {@code false},
- *       split ScheduleGroup by the edge.</li>
- *       <li>if the destination of the edge has multiple inEdges, split ScheduleGroup by the edge.</li>
- *       <li>Otherwise, the source and the destination of the edge should be assigned same ScheduleGroup.</li>
- *     </ul>
- *   </li>
+ * <li>Vertices connected with push edges must be assigned same ScheduleGroup.</li>
+ * <li>For pull edges,
+ * <ul>
+ * <li>if the destination of the edge depends on multiple ScheduleGroups, split ScheduleGroup by the edge.</li>
+ * <li>if the edge is broadcast type and {@code allowBroadcastWithinScheduleGroup} is {@code false},
+ * split ScheduleGroup by the edge.</li>
+ * <li>if the edge is shuffle type and {@code allowShuffleWithinScheduleGroup} is {@code false},
+ * split ScheduleGroup by the edge.</li>
+ * <li>if the destination of the edge has multiple inEdges, split ScheduleGroup by the edge.</li>
+ * <li>Otherwise, the source and the destination of the edge should be assigned same ScheduleGroup.</li>
+ * </ul>
+ * </li>
  * </ul>
  */
 @Annotates(ScheduleGroupProperty.class)
@@ -80,8 +80,9 @@
 
   /**
    * Constructor.
-   * @param allowBroadcastWithinScheduleGroup whether to allow Broadcast edges within a ScheduleGroup or not
-   * @param allowShuffleWithinScheduleGroup whether to allow Shuffle edges within a ScheduleGroup or not
+   *
+   * @param allowBroadcastWithinScheduleGroup       whether to allow Broadcast edges within a ScheduleGroup or not
+   * @param allowShuffleWithinScheduleGroup         whether to allow Shuffle edges within a ScheduleGroup or not
    * @param allowMultipleInEdgesWithinScheduleGroup whether to allow vertices with multiple dependencies or not
    */
   public DefaultScheduleGroupPass(final boolean allowBroadcastWithinScheduleGroup,
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DuplicateEdgeGroupSizePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DuplicateEdgeGroupSizePass.java
index 0e49943..6ebd2a2 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DuplicateEdgeGroupSizePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DuplicateEdgeGroupSizePass.java
@@ -42,27 +42,27 @@
   public IRDAG apply(final IRDAG dag) {
     final HashMap<String, Integer> groupIdToGroupSize = new HashMap<>();
     dag.topologicalDo(vertex -> dag.getIncomingEdgesOf(vertex)
-        .forEach(e -> {
-          final Optional<DuplicateEdgeGroupPropertyValue> duplicateEdgeGroupProperty =
-              e.getPropertyValue(DuplicateEdgeGroupProperty.class);
-          if (duplicateEdgeGroupProperty.isPresent()) {
-            final String groupId = duplicateEdgeGroupProperty.get().getGroupId();
-            final Integer currentCount = groupIdToGroupSize.getOrDefault(groupId, 0);
-            groupIdToGroupSize.put(groupId, currentCount + 1);
-          }
-        }));
+      .forEach(e -> {
+        final Optional<DuplicateEdgeGroupPropertyValue> duplicateEdgeGroupProperty =
+          e.getPropertyValue(DuplicateEdgeGroupProperty.class);
+        if (duplicateEdgeGroupProperty.isPresent()) {
+          final String groupId = duplicateEdgeGroupProperty.get().getGroupId();
+          final Integer currentCount = groupIdToGroupSize.getOrDefault(groupId, 0);
+          groupIdToGroupSize.put(groupId, currentCount + 1);
+        }
+      }));
 
     dag.topologicalDo(vertex -> dag.getIncomingEdgesOf(vertex)
-        .forEach(e -> {
-          final Optional<DuplicateEdgeGroupPropertyValue> duplicateEdgeGroupProperty =
-              e.getPropertyValue(DuplicateEdgeGroupProperty.class);
-          if (duplicateEdgeGroupProperty.isPresent()) {
-            final String groupId = duplicateEdgeGroupProperty.get().getGroupId();
-            if (groupIdToGroupSize.containsKey(groupId)) {
-              duplicateEdgeGroupProperty.get().setGroupSize(groupIdToGroupSize.get(groupId));
-            }
+      .forEach(e -> {
+        final Optional<DuplicateEdgeGroupPropertyValue> duplicateEdgeGroupProperty =
+          e.getPropertyValue(DuplicateEdgeGroupProperty.class);
+        if (duplicateEdgeGroupProperty.isPresent()) {
+          final String groupId = duplicateEdgeGroupProperty.get().getGroupId();
+          if (groupIdToGroupSize.containsKey(groupId)) {
+            duplicateEdgeGroupProperty.get().setGroupSize(groupIdToGroupSize.get(groupId));
           }
-        }));
+        }
+      }));
     return dag;
   }
 }
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/LargeShuffleAnnotatingPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/LargeShuffleAnnotatingPass.java
index 6dc399b..120c598 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/LargeShuffleAnnotatingPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/LargeShuffleAnnotatingPass.java
@@ -19,23 +19,26 @@
 package org.apache.nemo.compiler.optimizer.pass.compiletime.annotating;
 
 import org.apache.nemo.common.ir.IRDAG;
-import org.apache.nemo.common.ir.edge.executionproperty.*;
+import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
+import org.apache.nemo.common.ir.edge.executionproperty.DataFlowProperty;
+import org.apache.nemo.common.ir.edge.executionproperty.DataPersistenceProperty;
+import org.apache.nemo.common.ir.edge.executionproperty.DataStoreProperty;
 import org.apache.nemo.common.ir.vertex.executionproperty.ResourceSlotProperty;
 import org.apache.nemo.common.ir.vertex.utility.StreamVertex;
 import org.apache.nemo.compiler.optimizer.pass.compiletime.Requires;
 
 /**
  * This pass assumes that a StreamVertex was previously inserted to receive each shuffle edge.
- *
+ * <p>
  * src - shuffle-edge - streamvertex - one-to-one-edge - dst
- *
+ * <p>
  * (1) shuffle-edge
  * Encode/compress into byte[], and have the receiver read data as the same byte[], rather than decompressing/decoding.
  * Perform a push-based in-memory shuffle with discarding on.
- *
+ * <p>
  * (2) streamvertex
  * Ignore resource slots, such that all tasks fetch the in-memory input data blocks as soon as they become available.
- *
+ * <p>
  * (3) one-to-one-edge
  * Do not encode/compress the byte[]
  * Perform a pull-based and on-disk data transfer with the DedicatedKeyPerElement.
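
Several class comments, here and in DefaultScheduleGroupPass above, swap blank Javadoc lines for explicit `<p>` tags. That matches how Javadoc is rendered: the output is HTML, so a bare blank comment line does not begin a new paragraph. A minimal sketch (hypothetical class):

```java
/**
 * First paragraph of a class description.
 * <p>
 * Second paragraph: the explicit tag, not the blank line it replaces,
 * is what starts a new paragraph in the rendered HTML.
 */
public final class JavadocParagraphExample {
}
```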
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/PipeTransferForAllEdgesPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/PipeTransferForAllEdgesPass.java
index 37ad2cb..2436b35 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/PipeTransferForAllEdgesPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/PipeTransferForAllEdgesPass.java
@@ -37,8 +37,8 @@
   public IRDAG apply(final IRDAG dag) {
     dag.getVertices().forEach(vertex -> {
       dag.getIncomingEdgesOf(vertex).stream()
-          .forEach(edge -> edge.setPropertyPermanently(
-              DataStoreProperty.of(DataStoreProperty.Value.Pipe)));
+        .forEach(edge -> edge.setPropertyPermanently(
+          DataStoreProperty.of(DataStoreProperty.Value.Pipe)));
     });
     return dag;
   }
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ResourceLocalityPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ResourceLocalityPass.java
index 8294beb..b13e8ab 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ResourceLocalityPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ResourceLocalityPass.java
@@ -38,8 +38,8 @@
   public IRDAG apply(final IRDAG dag) {
     // On every vertex, if ResourceLocalityProperty is not set, put it as true.
     dag.getVertices().stream()
-        .filter(v -> !v.getPropertyValue(ResourceLocalityProperty.class).isPresent())
-        .forEach(v -> v.setProperty(ResourceLocalityProperty.of(true)));
+      .filter(v -> !v.getPropertyValue(ResourceLocalityProperty.class).isPresent())
+      .forEach(v -> v.setProperty(ResourceLocalityProperty.of(true)));
     return dag;
   }
 }
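
The pass bodies above show the same indentation rule applied to fluent chains: each chained call continues at a two-space indent under the stream source, as in the reformatted filter/forEach chain in ResourceLocalityPass. A self-contained sketch:

```java
import java.util.Arrays;

public final class StreamChainIndentExample {
  public static void main(final String[] args) {
    // Chained calls indent two spaces under the stream source,
    // matching the reformatted filter/forEach chains above.
    Arrays.asList(1, 2, 3).stream()
      .filter(i -> i % 2 == 1)
      .forEach(System.out::println);
  }
}
```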
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ResourceSitePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ResourceSitePass.java
index 1820a7c..38d4e37 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ResourceSitePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ResourceSitePass.java
@@ -91,7 +91,7 @@
   }
 
   /**
-   * @param nodes to distribute the shares
+   * @param nodes       to distribute the shares
    * @param parallelism number of parallel tasks.
    * @return share for each node.
    */
@@ -106,23 +106,23 @@
   }
 
   /**
-   * @param dag IR DAG.
+   * @param dag                    IR DAG.
    * @param bandwidthSpecification bandwidth specification.
    */
   private static void assignNodeShares(
-      final IRDAG dag,
-      final BandwidthSpecification bandwidthSpecification) {
+    final IRDAG dag,
+    final BandwidthSpecification bandwidthSpecification) {
     dag.topologicalDo(irVertex -> {
       final Collection<IREdge> inEdges = dag.getIncomingEdgesOf(irVertex);
       final int parallelism = irVertex.getPropertyValue(ParallelismProperty.class)
-          .orElseThrow(() -> new RuntimeException("Parallelism property required"));
+        .orElseThrow(() -> new RuntimeException("Parallelism property required"));
       if (inEdges.size() == 0) {
         // This vertex is root vertex.
         // Fall back to setting even distribution
         irVertex.setProperty(ResourceSiteProperty.of(EMPTY_MAP));
       } else if (isOneToOneEdge(inEdges)) {
         final Optional<HashMap<String, Integer>> property = inEdges.iterator().next().getSrc()
-            .getPropertyValue(ResourceSiteProperty.class);
+          .getPropertyValue(ResourceSiteProperty.class);
         irVertex.setProperty(ResourceSiteProperty.of(property.get()));
       } else {
         // This IRVertex has shuffle inEdge(s), or has multiple inEdges.
@@ -131,13 +131,13 @@
           final IRVertex parentVertex = edgeToIRVertex.getSrc();
           final Map<String, Integer> parentShares = parentVertex.getPropertyValue(ResourceSiteProperty.class).get();
           final int parentParallelism = parentVertex.getPropertyValue(ParallelismProperty.class)
-              .orElseThrow(() -> new RuntimeException("Parallelism property required"));
+            .orElseThrow(() -> new RuntimeException("Parallelism property required"));
           final Map<String, Integer> shares = parentShares.isEmpty() ? getEvenShares(bandwidthSpecification.getNodes(),
-              parentParallelism) : parentShares;
+            parentParallelism) : parentShares;
           for (final Map.Entry<String, Integer> element : shares.entrySet()) {
             parentLocationShares.putIfAbsent(element.getKey(), 0);
             parentLocationShares.put(element.getKey(),
-                element.getValue() + parentLocationShares.get(element.getKey()));
+              element.getValue() + parentLocationShares.get(element.getKey()));
           }
         }
         final double[] ratios = optimize(bandwidthSpecification, parentLocationShares);
@@ -164,14 +164,15 @@
    */
   private static boolean isOneToOneEdge(final Collection<IREdge> inEdges) {
     return inEdges.size() == 1 && inEdges.iterator().next()
-          .getPropertyValue(CommunicationPatternProperty.class).get()
-          .equals(CommunicationPatternProperty.Value.OneToOne);
+      .getPropertyValue(CommunicationPatternProperty.class).get()
+      .equals(CommunicationPatternProperty.Value.OneToOne);
   }
 
   /**
    * Computes share of parallelism that each node is responsible for.
+   *
    * @param bandwidthSpecification provides bandwidth information between nodes
-   * @param parentNodeShares shares of parallelism for the parent vertex
+   * @param parentNodeShares       shares of parallelism for the parent vertex
    * @return array of fractions of parallelism that each node is responsible for
    */
   private static double[] optimize(final BandwidthSpecification bandwidthSpecification,
@@ -191,7 +192,7 @@
       uploadCoefficientVector[OBJECTIVE_COEFFICIENT_INDEX] = bandwidthSpecification.up(nodeName);
       uploadCoefficientVector[nodeCoefficientIndex] = parentParallelismOnThisLocation;
       constraints.add(new LinearConstraint(uploadCoefficientVector, Relationship.GEQ,
-          parentParallelismOnThisLocation));
+        parentParallelismOnThisLocation));
 
       // Download bandwidth
       final double[] downloadCoefficientVector = new double[coefficientVectorSize];
@@ -226,7 +227,7 @@
       incrementor.setMaximalCount(2147483647);
       LOG.info(String.format("Max iterations: %d", solver.getMaxIterations()));
       final PointValuePair solved = solver.optimize(
-          new LinearConstraintSet(constraints), objectiveFunction, GoalType.MINIMIZE);
+        new LinearConstraintSet(constraints), objectiveFunction, GoalType.MINIMIZE);
 
       return Arrays.copyOfRange(solved.getPoint(), OBJECTIVE_COEFFICIENT_INDEX + 1, coefficientVectorSize);
     } catch (final NoSuchFieldException | IllegalAccessException e) {
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ResourceSlotPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ResourceSlotPass.java
index abc4c77..25bac1e 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ResourceSlotPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ResourceSlotPass.java
@@ -38,8 +38,8 @@
   public IRDAG apply(final IRDAG dag) {
     // On every vertex, if ResourceSlotProperty is not set, put it as true.
     dag.getVertices().stream()
-        .filter(v -> !v.getPropertyValue(ResourceSlotProperty.class).isPresent())
-        .forEach(v -> v.setProperty(ResourceSlotProperty.of(true)));
+      .filter(v -> !v.getPropertyValue(ResourceSlotProperty.class).isPresent())
+      .forEach(v -> v.setProperty(ResourceSlotProperty.of(true)));
     return dag;
   }
 }
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ShuffleEdgePushPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ShuffleEdgePushPass.java
index 007ce0e..9249a45 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ShuffleEdgePushPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/ShuffleEdgePushPass.java
@@ -47,7 +47,7 @@
       if (!inEdges.isEmpty()) {
         inEdges.forEach(edge -> {
           if (edge.getPropertyValue(CommunicationPatternProperty.class).get()
-              .equals(CommunicationPatternProperty.Value.Shuffle)) {
+            .equals(CommunicationPatternProperty.Value.Shuffle)) {
             edge.setProperty(DataFlowProperty.of(DataFlowProperty.Value.Push));
           }
         });
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/TransientResourceDataStorePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/TransientResourceDataStorePass.java
index 76136eb..2bc778e 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/TransientResourceDataStorePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/TransientResourceDataStorePass.java
@@ -62,25 +62,27 @@
 
   /**
    * Checks if the edge is from a transient container to a reserved container.
+   *
    * @param irEdge edge to check.
    * @return whether or not the edge satisfies the condition.
    */
   static boolean fromTransientToReserved(final IREdge irEdge) {
     return ResourcePriorityProperty.TRANSIENT
-        .equals(irEdge.getSrc().getPropertyValue(ResourcePriorityProperty.class).get())
-        && ResourcePriorityProperty.RESERVED
-        .equals(irEdge.getDst().getPropertyValue(ResourcePriorityProperty.class).get());
+      .equals(irEdge.getSrc().getPropertyValue(ResourcePriorityProperty.class).get())
+      && ResourcePriorityProperty.RESERVED
+      .equals(irEdge.getDst().getPropertyValue(ResourcePriorityProperty.class).get());
   }
 
   /**
    * Checks if the edge is from a reserved container to a transient container.
+   *
    * @param irEdge edge to check.
    * @return whether or not the edge satisfies the condition.
    */
   static boolean fromReservedToTransient(final IREdge irEdge) {
     return ResourcePriorityProperty.RESERVED
-        .equals(irEdge.getSrc().getPropertyValue(ResourcePriorityProperty.class).get())
-        && ResourcePriorityProperty.TRANSIENT
-        .equals(irEdge.getDst().getPropertyValue(ResourcePriorityProperty.class).get());
+      .equals(irEdge.getSrc().getPropertyValue(ResourcePriorityProperty.class).get())
+      && ResourcePriorityProperty.TRANSIENT
+      .equals(irEdge.getDst().getPropertyValue(ResourcePriorityProperty.class).get());
   }
 }
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/TransientResourcePriorityPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/TransientResourcePriorityPass.java
index 5988d61..46989de 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/TransientResourcePriorityPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/TransientResourcePriorityPass.java
@@ -58,25 +58,27 @@
 
   /**
    * Checks whether the irEdges have an M2M relationship.
+   *
    * @param irEdges irEdges to check.
    * @return whether or not any of them has an M2M relationship.
    */
   private boolean hasM2M(final List<IREdge> irEdges) {
     return irEdges.stream().anyMatch(edge ->
-        edge.getPropertyValue(CommunicationPatternProperty.class).get()
-          .equals(CommunicationPatternProperty.Value.Shuffle));
+      edge.getPropertyValue(CommunicationPatternProperty.class).get()
+        .equals(CommunicationPatternProperty.Value.Shuffle));
   }
 
   /**
    * Checks whether the irEdges are all from reserved containers.
+   *
    * @param irEdges irEdges to check.
    * @return whether or not they are from reserved containers.
    */
   private boolean allO2OFromReserved(final List<IREdge> irEdges) {
     return irEdges.stream()
-        .allMatch(edge -> CommunicationPatternProperty.Value.OneToOne.equals(
-            edge.getPropertyValue(CommunicationPatternProperty.class).get())
-            && edge.getSrc().getPropertyValue(ResourcePriorityProperty.class).get().equals(
-                ResourcePriorityProperty.RESERVED));
+      .allMatch(edge -> CommunicationPatternProperty.Value.OneToOne.equals(
+        edge.getPropertyValue(CommunicationPatternProperty.class).get())
+        && edge.getSrc().getPropertyValue(ResourcePriorityProperty.class).get().equals(
+        ResourcePriorityProperty.RESERVED));
   }
 }
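
The two predicates above are plain Stream idioms: `anyMatch` for "at least one shuffle (many-to-many) edge" and `allMatch` for "every edge is one-to-one and comes from a reserved container". A runnable sketch on simplified stand-in types (not the Nemo classes):

```java
import java.util.List;

// CommPattern, Priority, and Edge are hypothetical stand-ins for
// CommunicationPatternProperty, ResourcePriorityProperty, and IREdge.
public class TransientPredicatesSketch {
  enum CommPattern { ONE_TO_ONE, BROADCAST, SHUFFLE }
  enum Priority { TRANSIENT, RESERVED }
  record Edge(CommPattern pattern, Priority srcPriority) {}

  // hasM2M: true if any incoming edge is a shuffle (many-to-many).
  static boolean hasM2M(final List<Edge> edges) {
    return edges.stream().anyMatch(e -> e.pattern() == CommPattern.SHUFFLE);
  }

  // allO2OFromReserved: true only if every edge is one-to-one and originates
  // from a reserved container. Note: allMatch is vacuously true on an empty list.
  static boolean allO2OFromReserved(final List<Edge> edges) {
    return edges.stream().allMatch(e ->
      e.pattern() == CommPattern.ONE_TO_ONE && e.srcPriority() == Priority.RESERVED);
  }

  public static void main(String[] args) {
    final List<Edge> in = List.of(new Edge(CommPattern.SHUFFLE, Priority.TRANSIENT));
    System.out.println(hasM2M(in));             // true
    System.out.println(allO2OFromReserved(in)); // false
  }
}
```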
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/UpfrontCloningPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/UpfrontCloningPass.java
index 17492d4..7a2d9aa 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/UpfrontCloningPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/UpfrontCloningPass.java
@@ -39,17 +39,17 @@
   @Override
   public IRDAG apply(final IRDAG dag) {
     dag.getVertices().stream()
-        .filter(vertex -> dag.getIncomingEdgesOf(vertex.getId())
-          .stream()
-          // TODO #198: Handle Un-cloneable Beam Sink Operators
-          // only shuffle receivers (for now... as particular Beam sink operators fail when cloned)
-          .anyMatch(edge ->
-            edge.getPropertyValue(CommunicationPatternProperty.class)
-              .orElseThrow(() -> new IllegalStateException())
-              .equals(CommunicationPatternProperty.Value.Shuffle))
-          )
-        .forEach(vertex -> vertex.setProperty(
-          ClonedSchedulingProperty.of(new ClonedSchedulingProperty.CloneConf()))); // clone upfront, always
+      .filter(vertex -> dag.getIncomingEdgesOf(vertex.getId())
+        .stream()
+        // TODO #198: Handle Un-cloneable Beam Sink Operators
+        // only shuffle receivers (for now... as particular Beam sink operators fail when cloned)
+        .anyMatch(edge ->
+          edge.getPropertyValue(CommunicationPatternProperty.class)
+            .orElseThrow(() -> new IllegalStateException())
+            .equals(CommunicationPatternProperty.Value.Shuffle))
+      )
+      .forEach(vertex -> vertex.setProperty(
+        ClonedSchedulingProperty.of(new ClonedSchedulingProperty.CloneConf()))); // clone upfront, always
     return dag;
   }
 }
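
The pass body above is a filter-then-annotate pattern: keep only the vertices that receive at least one shuffle edge, then mark each of them for upfront cloning. A self-contained sketch of that shape, with hypothetical types standing in for IRDAG, IREdge, and ClonedSchedulingProperty:

```java
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Hypothetical stand-in model; not the Nemo API.
public class UpfrontCloningSketch {
  enum CommPattern { ONE_TO_ONE, SHUFFLE }
  record Edge(String src, String dst, CommPattern pattern) {}

  static Set<String> verticesToClone(final List<String> vertices, final List<Edge> edges) {
    final Set<String> marked = new HashSet<>();
    vertices.stream()
      .filter(v -> edges.stream()
        .anyMatch(e -> e.dst().equals(v) && e.pattern() == CommPattern.SHUFFLE))
      .forEach(marked::add); // stands in for setProperty(ClonedSchedulingProperty.of(...))
    return marked;
  }

  public static void main(String[] args) {
    final List<Edge> edges = List.of(new Edge("map", "reduce", CommPattern.SHUFFLE));
    System.out.println(verticesToClone(List.of("map", "reduce"), edges)); // [reduce]
  }
}
```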
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/CompositePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/CompositePass.java
index 19c2a64..d27854f 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/CompositePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/CompositePass.java
@@ -37,6 +37,7 @@
 
   /**
    * Constructor.
+   *
    * @param passList list of compile time passes.
    */
   public CompositePass(final List<CompileTimePass> passList) {
@@ -52,6 +53,7 @@
 
   /**
    * Getter for list of compile time passes.
+   *
    * @return the list of CompileTimePass.
    */
   public final List<CompileTimePass> getPassList() {
@@ -65,7 +67,8 @@
 
   /**
   * Recursively apply the given list of passes.
-   * @param dag dag.
+   *
+   * @param dag          dag.
    * @param passIterator pass iterator.
    * @return dag.
    */
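
The recursivelyApply method named in this Javadoc consumes an iterator of passes; each step applies one pass and recurses on the result. A sketch under stand-in types (`Dag` for IRDAG, `UnaryOperator` for a compile-time pass):

```java
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.function.UnaryOperator;

// Hypothetical stand-in model; not the Nemo API.
public class CompositePassSketch {
  record Dag(List<String> appliedPasses) {}

  static Dag recursivelyApply(final Dag dag, final Iterator<UnaryOperator<Dag>> passIterator) {
    if (!passIterator.hasNext()) {
      return dag; // base case: every pass has been applied.
    }
    return recursivelyApply(passIterator.next().apply(dag), passIterator);
  }

  static UnaryOperator<Dag> namedPass(final String name) {
    return dag -> {
      final List<String> applied = new ArrayList<>(dag.appliedPasses());
      applied.add(name);
      return new Dag(applied);
    };
  }

  public static void main(String[] args) {
    final List<UnaryOperator<Dag>> passes =
      List.of(namedPass("DefaultParallelismPass"), namedPass("DefaultScheduleGroupPass"));
    System.out.println(recursivelyApply(new Dag(List.of()), passes.iterator()).appliedPasses());
    // prints [DefaultParallelismPass, DefaultScheduleGroupPass]
  }
}
```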
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/DefaultCompositePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/DefaultCompositePass.java
index 3b869e0..49601f2 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/DefaultCompositePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/DefaultCompositePass.java
@@ -33,15 +33,15 @@
    */
   public DefaultCompositePass() {
     super(Arrays.asList(
-        new DefaultParallelismPass(),
-        new DefaultEdgeEncoderPass(),
-        new DefaultEdgeDecoderPass(),
-        new DefaultDataStorePass(),
-        new DefaultDataPersistencePass(),
-        new DefaultScheduleGroupPass(),
-        new CompressionPass(),
-        new ResourceLocalityPass(),
-        new ResourceSlotPass()
+      new DefaultParallelismPass(),
+      new DefaultEdgeEncoderPass(),
+      new DefaultEdgeDecoderPass(),
+      new DefaultDataStorePass(),
+      new DefaultDataPersistencePass(),
+      new DefaultScheduleGroupPass(),
+      new CompressionPass(),
+      new ResourceLocalityPass(),
+      new ResourceSlotPass()
     ));
   }
 }
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/LargeShuffleCompositePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/LargeShuffleCompositePass.java
index 2752c04..612fb0a 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/LargeShuffleCompositePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/LargeShuffleCompositePass.java
@@ -18,7 +18,7 @@
  */
 package org.apache.nemo.compiler.optimizer.pass.compiletime.composite;
 
-import org.apache.nemo.compiler.optimizer.pass.compiletime.annotating.*;
+import org.apache.nemo.compiler.optimizer.pass.compiletime.annotating.LargeShuffleAnnotatingPass;
 import org.apache.nemo.compiler.optimizer.pass.compiletime.reshaping.LargeShuffleReshapingPass;
 
 import java.util.Arrays;
@@ -33,8 +33,8 @@
    */
   public LargeShuffleCompositePass() {
     super(Arrays.asList(
-        new LargeShuffleReshapingPass(),
-        new LargeShuffleAnnotatingPass()
+      new LargeShuffleReshapingPass(),
+      new LargeShuffleAnnotatingPass()
     ));
   }
 }
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/LoopOptimizationCompositePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/LoopOptimizationCompositePass.java
index e594f82..e36cbda 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/LoopOptimizationCompositePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/LoopOptimizationCompositePass.java
@@ -34,11 +34,11 @@
    */
   public LoopOptimizationCompositePass() {
     super(Arrays.asList(
-        new LoopExtractionPass(),
-        LoopOptimizations.getLoopFusionPass(),
-        LoopOptimizations.getLoopInvariantCodeMotionPass(),
-        new LoopUnrollingPass(), // Groups then unrolls loops.
-        new DuplicateEdgeGroupSizePass()
+      new LoopExtractionPass(),
+      LoopOptimizations.getLoopFusionPass(),
+      LoopOptimizations.getLoopInvariantCodeMotionPass(),
+      new LoopUnrollingPass(), // Groups then unrolls loops.
+      new DuplicateEdgeGroupSizePass()
     ));
   }
 }
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/TransientResourceCompositePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/TransientResourceCompositePass.java
index bccffaf..a848488 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/TransientResourceCompositePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/TransientResourceCompositePass.java
@@ -34,9 +34,9 @@
    */
   public TransientResourceCompositePass() {
     super(Arrays.asList(
-        new TransientResourcePriorityPass(),
-        new TransientResourceDataStorePass(),
-        new TransientResourceDataFlowPass()
+      new TransientResourcePriorityPass(),
+      new TransientResourceDataStorePass(),
+      new TransientResourceDataFlowPass()
     ));
   }
 }
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/CommonSubexpressionEliminationPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/CommonSubexpressionEliminationPass.java
index 9e066e1..fe2dff0 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/CommonSubexpressionEliminationPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/CommonSubexpressionEliminationPass.java
@@ -135,10 +135,11 @@
 
   /**
    * merge equivalent operator vertices and add them to the provided builder.
-   * @param ovs operator vertices that are to be merged (if there are no dependencies between them).
-   * @param builder builder to add the merged vertices to.
-   * @param dag dag to observe while adding them.
-   * @param inEdges incoming edges information.
+   *
+   * @param ovs      operator vertices that are to be merged (if there are no dependencies between them).
+   * @param builder  builder to add the merged vertices to.
+   * @param dag      dag to observe while adding them.
+   * @param inEdges  incoming edges information.
    * @param outEdges outgoing edges information.
    */
   private static void mergeAndAddToBuilder(final List<OperatorVertex> ovs, final DAGBuilder<IRVertex, IREdge> builder,
@@ -161,7 +162,7 @@
             outEdges.getOrDefault(ov, new HashSet<>()).forEach(e -> {
               outListToModify.remove(e);
               final IREdge newIrEdge = new IREdge(e.getPropertyValue(CommunicationPatternProperty.class).get(),
-                  operatorVertexToUse, e.getDst());
+                operatorVertexToUse, e.getDst());
               final Optional<EncoderFactory> encoderProperty = e.getPropertyValue(EncoderProperty.class);
               if (encoderProperty.isPresent()) {
                 newIrEdge.setProperty(EncoderProperty.of(encoderProperty.get()));
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopExtractionPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopExtractionPass.java
index 1c37978..8c6b448 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopExtractionPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopExtractionPass.java
@@ -61,14 +61,15 @@
 
   /**
    * This method finds the maximum loop vertex stack depth of a specific DAG. This is to handle nested loops.
+   *
    * @param dag DAG to observe.
    * @return The maximum stack depth of the DAG.
   * @throws Exception exceptions along the way.
    */
   private Integer findMaxLoopVertexStackDepth(final DAG<IRVertex, IREdge> dag) {
     final OptionalInt maxDepth = dag.getVertices().stream().filter(dag::isCompositeVertex)
-        .mapToInt(dag::getLoopStackDepthOf)
-        .max();
+      .mapToInt(dag::getLoopStackDepthOf)
+      .max();
     return maxDepth.orElse(0);
   }
 
@@ -76,7 +77,8 @@
    * This part groups each iteration of loops together by observing the LoopVertex assigned to primitive operators,
   * which is assigned by the NemoPipelineVisitor. This also indicates the depth of
   * nested loops that the function handles. It recursively calls itself from the maximum depth down to 0.
-   * @param dag DAG to process
+   *
+   * @param dag   DAG to process
    * @param depth the depth of the stack to process. Must be greater than 0.
    * @return processed DAG.
   * @throws Exception exceptions along the way.
@@ -143,9 +145,10 @@
 
   /**
    * Method for connecting an element to a loop. That is, loop -> loop OR operator -> loop.
-   * @param dag to observe the inEdges from.
-   * @param builder to add the new edge to.
-   * @param dstVertex the destination vertex that belongs to a certain loop.
+   *
+   * @param dag                to observe the inEdges from.
+   * @param builder            to add the new edge to.
+   * @param dstVertex          the destination vertex that belongs to a certain loop.
    * @param assignedLoopVertex the loop that dstVertex belongs to.
    */
   private static void connectElementToLoop(final DAG<IRVertex, IREdge> dag, final DAGBuilder<IRVertex, IREdge> builder,
@@ -160,7 +163,7 @@
         } else { // loop -> loop connection
           assignedLoopVertex.addDagIncomingEdge(irEdge);
           final IREdge edgeToLoop = new IREdge(irEdge.getPropertyValue(CommunicationPatternProperty.class).get(),
-              srcLoopVertex, assignedLoopVertex);
+            srcLoopVertex, assignedLoopVertex);
           irEdge.copyExecutionPropertiesTo(edgeToLoop);
           builder.connectVertices(edgeToLoop);
           assignedLoopVertex.mapEdgeWithLoop(edgeToLoop, irEdge);
@@ -168,7 +171,7 @@
       } else { // operator -> loop
         assignedLoopVertex.addDagIncomingEdge(irEdge);
         final IREdge edgeToLoop = new IREdge(irEdge.getPropertyValue(CommunicationPatternProperty.class).get(),
-            irEdge.getSrc(), assignedLoopVertex);
+          irEdge.getSrc(), assignedLoopVertex);
         irEdge.copyExecutionPropertiesTo(edgeToLoop);
         builder.connectVertices(edgeToLoop);
         assignedLoopVertex.mapEdgeWithLoop(edgeToLoop, irEdge);
@@ -179,6 +182,7 @@
   /**
    * This part rolls the repetitive LoopVertices into a single one, leaving only the root LoopVertex.
    * Following iterations can be generated with the information included in the LoopVertex.
+   *
    * @param dag DAG to process.
    * @return Processed DAG.
   * @throws Exception exceptions along the way.
@@ -222,7 +226,7 @@
 
           // Zip current vertices together. We rely on the fact that getTopologicalSort() brings consistent results.
           final Iterator<IRVertex> verticesOfRootLoopVertex =
-              finalRootLoopVertex.getDAG().getTopologicalSort().iterator();
+            finalRootLoopVertex.getDAG().getTopologicalSort().iterator();
           final Iterator<IRVertex> verticesOfCurrentLoopVertex = loopVertex.getDAG().getTopologicalSort().iterator();
           // Map of (RolledVertex --> (Root)Vertex)
           final HashMap<IRVertex, IRVertex> equivalentVertices = equivalentVerticesOfLoops.get(finalRootLoopVertex);
@@ -248,13 +252,13 @@
 
               // add the new IREdge to the iterative incoming edges list.
               final IREdge newIrEdge = new IREdge(edge.getPropertyValue(CommunicationPatternProperty.class).get(),
-                  equivalentSrcVertex, equivalentDstVertex);
+                equivalentSrcVertex, equivalentDstVertex);
               edge.copyExecutionPropertiesTo(newIrEdge);
               finalRootLoopVertex.addIterativeIncomingEdge(newIrEdge);
             } else {
               // src is from outside the previous loop. vertex outside previous loop -> DAG.
               final IREdge newIrEdge = new IREdge(edge.getPropertyValue(CommunicationPatternProperty.class).get(),
-                  srcVertex, equivalentDstVertex);
+                srcVertex, equivalentDstVertex);
               edge.copyExecutionPropertiesTo(newIrEdge);
               finalRootLoopVertex.addNonIterativeIncomingEdge(newIrEdge);
             }
@@ -267,7 +271,7 @@
             final IRVertex equivalentSrcVertex = equivalentVertices.get(srcVertex);
 
             final IREdge newIrEdge = new IREdge(edge.getPropertyValue(CommunicationPatternProperty.class).get(),
-                equivalentSrcVertex, dstVertex);
+              equivalentSrcVertex, dstVertex);
             edge.copyExecutionPropertiesTo(newIrEdge);
             finalRootLoopVertex.addDagOutgoingEdge(newIrEdge);
             finalRootLoopVertex.mapEdgeWithLoop(loopVertex.getEdgeWithLoop(edge), newIrEdge);
@@ -284,9 +288,10 @@
 
   /**
    * Adds the vertex and the incoming edges of the vertex to the builder.
-   * @param builder Builder that it adds to.
-   * @param irVertex Vertex to add.
-   * @param dag DAG to observe the incoming edges of the vertex.
+   *
+   * @param builder                Builder that it adds to.
+   * @param irVertex               Vertex to add.
+   * @param dag                    DAG to observe the incoming edges of the vertex.
    * @param loopVerticesOfSameLoop List that keeps track of the iterations of the identical loop.
    */
   private static void addVertexToBuilder(final DAGBuilder<IRVertex, IREdge> builder, final DAG<IRVertex, IREdge> dag,
@@ -312,7 +317,7 @@
         builder.connectVertices(edge);
       } else {
         final IREdge newIrEdge = new IREdge(edge.getPropertyValue(CommunicationPatternProperty.class).get(),
-            firstEquivalentVertex, irVertex);
+          firstEquivalentVertex, irVertex);
         edge.copyExecutionPropertiesTo(newIrEdge);
         builder.connectVertices(newIrEdge);
       }
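
Among the reformatted methods here, findMaxLoopVertexStackDepth is the easiest to read in isolation: a stream max over composite vertices, defaulting to 0 when the DAG has no loops. A sketch with a stand-in `Vertex` type:

```java
import java.util.List;
import java.util.OptionalInt;

// Hypothetical stand-in model; not the Nemo API.
public class LoopDepthSketch {
  record Vertex(boolean composite, int loopStackDepth) {}

  static int maxLoopDepth(final List<Vertex> vertices) {
    final OptionalInt maxDepth = vertices.stream()
      .filter(Vertex::composite)
      .mapToInt(Vertex::loopStackDepth)
      .max();
    return maxDepth.orElse(0); // a DAG without loops has depth 0.
  }

  public static void main(String[] args) {
    System.out.println(maxLoopDepth(List.of(
      new Vertex(true, 2), new Vertex(true, 1), new Vertex(false, 9)))); // 2
  }
}
```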
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopOptimizations.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopOptimizations.java
index 56013c0..8ff03bf 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopOptimizations.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopOptimizations.java
@@ -59,11 +59,12 @@
 
   /**
    * Static function to collect LoopVertices.
-   * @param dag DAG to observe.
+   *
+   * @param dag          DAG to observe.
    * @param loopVertices Map to save the LoopVertices to, according to their termination conditions.
-   * @param inEdges incoming edges of LoopVertices.
-   * @param outEdges outgoing Edges of LoopVertices.
-   * @param builder builder to build the rest of the DAG on.
+   * @param inEdges      incoming edges of LoopVertices.
+   * @param outEdges     outgoing Edges of LoopVertices.
+   * @param builder      builder to build the rest of the DAG on.
    */
   private static void collectLoopVertices(final DAG<IRVertex, IREdge> dag,
                                           final List<LoopVertex> loopVertices,
@@ -209,12 +210,13 @@
 
     /**
      * Merge the list of loopVertices into a single LoopVertex.
+     *
      * @param loopVertices list of LoopVertices to merge.
      * @return the merged single LoopVertex.
      */
     private LoopVertex mergeLoopVertices(final Set<LoopVertex> loopVertices) {
       final String newName =
-          String.join("+", loopVertices.stream().map(LoopVertex::getName).collect(Collectors.toList()));
+        String.join("+", loopVertices.stream().map(LoopVertex::getName).collect(Collectors.toList()));
       final LoopVertex mergedLoopVertex = new LoopVertex(newName);
       loopVertices.forEach(loopVertex -> {
         final DAG<IRVertex, IREdge> dagToCopy = loopVertex.getDAG();
@@ -224,9 +226,9 @@
         });
         loopVertex.getDagIncomingEdges().forEach((v, es) -> es.forEach(mergedLoopVertex::addDagIncomingEdge));
         loopVertex.getIterativeIncomingEdges().forEach((v, es) ->
-            es.forEach(mergedLoopVertex::addIterativeIncomingEdge));
+          es.forEach(mergedLoopVertex::addIterativeIncomingEdge));
         loopVertex.getNonIterativeIncomingEdges().forEach((v, es) ->
-            es.forEach(mergedLoopVertex::addNonIterativeIncomingEdge));
+          es.forEach(mergedLoopVertex::addNonIterativeIncomingEdge));
         loopVertex.getDagOutgoingEdges().forEach((v, es) -> es.forEach(mergedLoopVertex::addDagOutgoingEdge));
       });
       return mergedLoopVertex;
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopUnrollingPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopUnrollingPass.java
index 3d8b119..46a23ab 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopUnrollingPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopUnrollingPass.java
@@ -48,6 +48,7 @@
 
   /**
   * A function that recursively calls Unroll until there is no LoopVertex left.
+   *
    * @param dag DAG to process.
    * @return DAG without LoopVertex.
    */
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/ReshapingPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/ReshapingPass.java
index dd80faa..e56d46d 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/ReshapingPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/ReshapingPass.java
@@ -35,12 +35,13 @@
 
   /**
    * Constructor.
+   *
    * @param cls the reshaping pass class.
    */
   public ReshapingPass(final Class<? extends ReshapingPass> cls) {
     final Requires requires = cls.getAnnotation(Requires.class);
     this.prerequisiteExecutionProperties = requires == null
-        ? new HashSet<>() : new HashSet<>(Arrays.asList(requires.value()));
+      ? new HashSet<>() : new HashSet<>(Arrays.asList(requires.value()));
   }
 
   /**
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/SamplingSkewReshapingPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/SamplingSkewReshapingPass.java
index 8db8085..efb9416 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/SamplingSkewReshapingPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/SamplingSkewReshapingPass.java
@@ -22,7 +22,9 @@
 import org.apache.nemo.common.dag.Edge;
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.common.ir.edge.IREdge;
-import org.apache.nemo.common.ir.edge.executionproperty.*;
+import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
+import org.apache.nemo.common.ir.edge.executionproperty.DataStoreProperty;
+import org.apache.nemo.common.ir.edge.executionproperty.KeyExtractorProperty;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.vertex.utility.MessageAggregatorVertex;
 import org.apache.nemo.common.ir.vertex.utility.MessageBarrierVertex;
@@ -31,25 +33,28 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Set;
 import java.util.stream.Collectors;
 
 /**
  * Optimizes the PartitionSet property of shuffle edges to handle data skews using the SamplingVertex.
- *
+ * <p>
  * This pass effectively partitions the IRDAG by non-oneToOne edges, clones each subDAG partition using SamplingVertex
  * to process sampled data, and executes each cloned partition prior to executing the corresponding original partition.
- *
+ * <p>
  * Suppose the IRDAG is partitioned into three sub-DAG partitions with shuffle dependencies as follows:
  * P1 - P2 - P3
- *
+ * <p>
  * Then, this pass will produce something like:
  * P1' - P1
- *     - P2' - P2 - P3
+ * - P2' - P2 - P3
  * where Px' consists of SamplingVertex objects that clone the execution of Px.
  * (P3 is not cloned here because it is a sink partition, and none of the outgoing edges of its vertices needs to be
  * optimized)
- *
+ * <p>
  * For each Px' this pass also inserts a MessageBarrierVertex, to use its data statistics for dynamically optimizing
  * the execution behaviors of Px.
  */
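
Note that the reformat also collapsed the leading spaces in the ASCII partition diagram (the `- P2' - P2 - P3` line lost its alignment under P1'). As for the partitioning idea the Javadoc describes, one simple way to group vertices into sub-DAG partitions is a union-find in which one-to-one edges merge groups and shuffle edges do not; the sketch below uses hypothetical stand-in types, not the pass's actual mechanism:

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical stand-in model; not the Nemo API.
public class PartitionSketch {
  enum CommPattern { ONE_TO_ONE, SHUFFLE }
  record Edge(String src, String dst, CommPattern pattern) {}

  static Map<String, String> partition(final List<String> vertices, final List<Edge> edges) {
    final Map<String, String> parent = new HashMap<>();
    vertices.forEach(v -> parent.put(v, v));
    edges.stream()
      .filter(e -> e.pattern() == CommPattern.ONE_TO_ONE) // shuffle edges do not merge.
      .forEach(e -> parent.put(find(parent, e.src()), find(parent, e.dst())));
    vertices.forEach(v -> parent.put(v, find(parent, v))); // flatten to the root.
    return parent;
  }

  static String find(final Map<String, String> parent, final String v) {
    return parent.get(v).equals(v) ? v : find(parent, parent.get(v));
  }

  public static void main(String[] args) {
    final List<String> vs = List.of("a", "b", "c");
    final List<Edge> es = List.of(
      new Edge("a", "b", CommPattern.ONE_TO_ONE), // a and b share a partition.
      new Edge("b", "c", CommPattern.SHUFFLE));   // c starts a new one.
    System.out.println(partition(vs, es)); // a and b map to one root; c to its own.
  }
}
```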
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/SkewHandlingUtil.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/SkewHandlingUtil.java
index 6231ba4..c176ad6 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/SkewHandlingUtil.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/SkewHandlingUtil.java
@@ -20,7 +20,10 @@
 
 import org.apache.nemo.common.KeyExtractor;
 import org.apache.nemo.common.Pair;
-import org.apache.nemo.common.coder.*;
+import org.apache.nemo.common.coder.LongDecoderFactory;
+import org.apache.nemo.common.coder.LongEncoderFactory;
+import org.apache.nemo.common.coder.PairDecoderFactory;
+import org.apache.nemo.common.coder.PairEncoderFactory;
 import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.edge.executionproperty.DecoderProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.EncoderProperty;
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/SkewReshapingPass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/SkewReshapingPass.java
index 046dd75..0b843dd 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/SkewReshapingPass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/SkewReshapingPass.java
@@ -21,15 +21,18 @@
 import org.apache.nemo.common.KeyExtractor;
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.common.ir.edge.IREdge;
-import org.apache.nemo.common.ir.edge.executionproperty.*;
+import org.apache.nemo.common.ir.edge.executionproperty.AdditionalOutputTagProperty;
+import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
+import org.apache.nemo.common.ir.edge.executionproperty.KeyExtractorProperty;
 import org.apache.nemo.common.ir.vertex.utility.MessageAggregatorVertex;
 import org.apache.nemo.common.ir.vertex.utility.MessageBarrierVertex;
-import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
 import org.apache.nemo.compiler.optimizer.pass.compiletime.Requires;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
@@ -37,7 +40,7 @@
  * Pass to reshape the IR DAG for skew handling.
  * We insert a {@link MessageBarrierVertex} for each shuffle edge,
  * and aggregate messages for multiple same-destination shuffle edges.
- * */
+ */
 @Requires(CommunicationPatternProperty.class)
 public final class SkewReshapingPass extends ReshapingPass {
   private static final Logger LOG = LoggerFactory.getLogger(SkewReshapingPass.class.getName());
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/runtime/RunTimePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/runtime/RunTimePass.java
index cfdf936..81c023c 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/runtime/RunTimePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/runtime/RunTimePass.java
@@ -25,6 +25,7 @@
 
 /**
  * Abstract class for dynamic optimization passes, for dynamically optimizing the IRDAG.
+ *
  * @param <T> type of the message used for dynamic optimization.
  */
 public abstract class RunTimePass<T> extends Pass implements BiFunction<IRDAG, Message<T>, IRDAG> {
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/runtime/SkewRunTimePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/runtime/SkewRunTimePass.java
index 5dcfac6..54e4599 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/runtime/SkewRunTimePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/pass/runtime/SkewRunTimePass.java
@@ -19,9 +19,9 @@
 package org.apache.nemo.compiler.optimizer.pass.runtime;
 
 import org.apache.nemo.common.HashRange;
+import org.apache.nemo.common.KeyRange;
 import org.apache.nemo.common.Pair;
 import org.apache.nemo.common.ir.IRDAG;
-import org.apache.nemo.common.KeyRange;
 import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.edge.executionproperty.PartitionSetProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.PartitionerProperty;
@@ -102,10 +102,10 @@
    * redistribute the key range of partitions with approximate size of (total size of partitions / the number of tasks).
    * Assumption: the returned key of the partitioner is always 0 or positive integer.
    *
-   * @param keyToCountMap statistics.
-   * @param partitioner used.
+   * @param keyToCountMap   statistics.
+   * @param partitioner     used.
    * @param numOfPartitions created.
-   * @param dstParallelism of the destination vertex.
+   * @param dstParallelism  of the destination vertex.
    * @return an optimal PartitionSetProperty and a ResourceAntiAffinityProperty.
    */
   Pair<PartitionSetProperty, ResourceAntiAffinityProperty> analyzeMessage(final Map<Object, Long> keyToCountMap,
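
The Javadoc above pins down the goal: cut the key space into ranges whose sizes approximate (total size of partitions / number of tasks), assuming keys are non-negative integers. One greedy way to do that, sketched with plain collections; the real pass returns a PartitionSetProperty and ResourceAntiAffinityProperty rather than raw ranges:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

// A greedy range-cutting sketch under the Javadoc's assumptions; simplified,
// not the pass's actual algorithm.
public class SkewRedistributionSketch {
  static List<int[]> redistribute(final Map<Integer, Long> keyToCount, final int numRanges) {
    final int maxKey = keyToCount.keySet().stream().max(Integer::compare).orElse(0);
    final long total = keyToCount.values().stream().mapToLong(Long::longValue).sum();
    final long target = total / numRanges; // ideal size per range.
    final List<int[]> ranges = new ArrayList<>();
    int start = 0;
    long acc = 0;
    for (int key = 0; key <= maxKey; key++) {
      acc += keyToCount.getOrDefault(key, 0L);
      final boolean lastRange = ranges.size() == numRanges - 1;
      if (acc >= target && !lastRange) {
        ranges.add(new int[]{start, key + 1}); // [start, key+1): exclusive end.
        start = key + 1;
        acc = 0;
      }
    }
    ranges.add(new int[]{start, maxKey + 1}); // remainder goes to the last range.
    return ranges;
  }

  public static void main(String[] args) {
    final Map<Integer, Long> counts = Map.of(0, 1L, 1, 1L, 2, 6L, 3, 1L, 4, 1L);
    redistribute(counts, 2).forEach(r -> System.out.println(r[0] + ".." + r[1]));
    // 0..3 (size 8) and 3..5 (size 2): the hot key 2 closes the first range.
  }
}
```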
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/BasicPullPolicy.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/BasicPullPolicy.java
index c304a1e..d9f063d 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/BasicPullPolicy.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/BasicPullPolicy.java
@@ -29,9 +29,9 @@
  */
 public final class BasicPullPolicy implements Policy {
   public static final PolicyBuilder BUILDER =
-      new PolicyBuilder()
-          .registerCompileTimePass(new AggressiveSpeculativeCloningPass())
-          .registerCompileTimePass(new DefaultScheduleGroupPass());
+    new PolicyBuilder()
+      .registerCompileTimePass(new AggressiveSpeculativeCloningPass())
+      .registerCompileTimePass(new DefaultScheduleGroupPass());
   private final Policy policy;
 
   /**
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/ConditionalLargeShufflePolicy.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/ConditionalLargeShufflePolicy.java
index af89f89..fac310f 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/ConditionalLargeShufflePolicy.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/ConditionalLargeShufflePolicy.java
@@ -30,10 +30,10 @@
  */
 public final class ConditionalLargeShufflePolicy implements Policy {
   public static final PolicyBuilder BUILDER =
-      new PolicyBuilder()
-          .registerCompileTimePass(new LargeShuffleCompositePass(), dag -> getMaxParallelism(dag) > 300)
-          .registerCompileTimePass(new LoopOptimizationCompositePass())
-          .registerCompileTimePass(new DefaultCompositePass());
+    new PolicyBuilder()
+      .registerCompileTimePass(new LargeShuffleCompositePass(), dag -> getMaxParallelism(dag) > 300)
+      .registerCompileTimePass(new LoopOptimizationCompositePass())
+      .registerCompileTimePass(new DefaultCompositePass());
   private final Policy policy;
 
   /**
@@ -45,13 +45,14 @@
 
   /**
   * Returns the maximum parallelism of the vertices of an IR DAG.
+   *
    * @param dag dag to observe.
    * @return the maximum parallelism, or 1 by default.
    */
   private static int getMaxParallelism(final IRDAG dag) {
     return dag.getVertices().stream()
-        .mapToInt(vertex -> vertex.getPropertyValue(ParallelismProperty.class).orElse(1))
-        .max().orElse(1);
+      .mapToInt(vertex -> vertex.getPropertyValue(ParallelismProperty.class).orElse(1))
+      .max().orElse(1);
   }
 
   @Override
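
The `dag -> getMaxParallelism(dag) > 300` argument in this policy's BUILDER shows the conditional form of registerCompileTimePass: the pass is stored together with a predicate and runs only when the predicate holds. A minimal sketch of that pattern with stand-in types:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
import java.util.function.UnaryOperator;

// Hypothetical stand-in model; not the Nemo API.
public class ConditionalPolicySketch {
  record Dag(int maxParallelism) {}
  record ConditionalPass(UnaryOperator<Dag> pass, Predicate<Dag> condition) {}

  static class Builder {
    private final List<ConditionalPass> passes = new ArrayList<>();

    Builder register(final UnaryOperator<Dag> pass, final Predicate<Dag> condition) {
      passes.add(new ConditionalPass(pass, condition));
      return this;
    }

    Dag apply(Dag dag) {
      for (final ConditionalPass p : passes) {
        if (p.condition().test(dag)) { // skip the pass when its condition fails.
          dag = p.pass().apply(dag);
        }
      }
      return dag;
    }
  }

  public static void main(String[] args) {
    final Builder builder = new Builder()
      .register(d -> { System.out.println("large-shuffle pass ran"); return d; },
                d -> d.maxParallelism() > 300);
    builder.apply(new Dag(500)); // prints; with new Dag(10) it would not.
  }
}
```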
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DataSkewPolicy.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DataSkewPolicy.java
index b25ca70..a8ab38e 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DataSkewPolicy.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DataSkewPolicy.java
@@ -31,11 +31,11 @@
  */
 public final class DataSkewPolicy implements Policy {
   public static final PolicyBuilder BUILDER =
-      new PolicyBuilder()
-        .registerCompileTimePass(new DefaultParallelismPass()) // SkewCompositePass relies on parallelism.
-        .registerRunTimePass(new SkewRunTimePass(), new SkewCompositePass())
-        .registerCompileTimePass(new LoopOptimizationCompositePass())
-        .registerCompileTimePass(new DefaultCompositePass());
+    new PolicyBuilder()
+      .registerCompileTimePass(new DefaultParallelismPass()) // SkewCompositePass relies on parallelism.
+      .registerRunTimePass(new SkewRunTimePass(), new SkewCompositePass())
+      .registerCompileTimePass(new LoopOptimizationCompositePass())
+      .registerCompileTimePass(new DefaultCompositePass());
 
   private final Policy policy;
 
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DefaultPolicy.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DefaultPolicy.java
index c1b626e..b4136b4 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DefaultPolicy.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DefaultPolicy.java
@@ -27,8 +27,8 @@
  */
 public final class DefaultPolicy implements Policy {
   public static final PolicyBuilder BUILDER =
-      new PolicyBuilder()
-          .registerCompileTimePass(new DefaultCompositePass());
+    new PolicyBuilder()
+      .registerCompileTimePass(new DefaultCompositePass());
   private final Policy policy;
 
   /**
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DefaultPolicyWithSeparatePass.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DefaultPolicyWithSeparatePass.java
index 8911fab..df7eaf6 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DefaultPolicyWithSeparatePass.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DefaultPolicyWithSeparatePass.java
@@ -34,9 +34,9 @@
  */
 public final class DefaultPolicyWithSeparatePass implements Policy {
   public static final PolicyBuilder BUILDER =
-      new PolicyBuilder()
-          .registerCompileTimePass(new DefaultParallelismPass())
-          .registerCompileTimePass(new RefactoredPass());
+    new PolicyBuilder()
+      .registerCompileTimePass(new DefaultParallelismPass())
+      .registerCompileTimePass(new RefactoredPass());
   private final Policy policy;
 
   /**
@@ -65,8 +65,8 @@
      */
     RefactoredPass() {
       super(Arrays.asList(
-          new DefaultDataStorePass(),
-          new DefaultScheduleGroupPass()
+        new DefaultDataStorePass(),
+        new DefaultScheduleGroupPass()
       ));
     }
   }
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DisaggregationPolicy.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DisaggregationPolicy.java
index 66846c2..2cc4b8a 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DisaggregationPolicy.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/DisaggregationPolicy.java
@@ -29,10 +29,10 @@
  */
 public final class DisaggregationPolicy implements Policy {
   public static final PolicyBuilder BUILDER =
-      new PolicyBuilder()
-          .registerCompileTimePass(new DisaggregationEdgeDataStorePass())
-          .registerCompileTimePass(new LoopOptimizationCompositePass())
-          .registerCompileTimePass(new DefaultCompositePass());
+    new PolicyBuilder()
+      .registerCompileTimePass(new DisaggregationEdgeDataStorePass())
+      .registerCompileTimePass(new LoopOptimizationCompositePass())
+      .registerCompileTimePass(new DefaultCompositePass());
   private final Policy policy;
 
   /**
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/LargeShufflePolicy.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/LargeShufflePolicy.java
index 2644106..5b8e3f1 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/LargeShufflePolicy.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/LargeShufflePolicy.java
@@ -29,10 +29,10 @@
  */
 public final class LargeShufflePolicy implements Policy {
   public static final PolicyBuilder BUILDER =
-      new PolicyBuilder()
-          .registerCompileTimePass(new LargeShuffleCompositePass())
-          .registerCompileTimePass(new LoopOptimizationCompositePass())
-          .registerCompileTimePass(new DefaultCompositePass());
+    new PolicyBuilder()
+      .registerCompileTimePass(new LargeShuffleCompositePass())
+      .registerCompileTimePass(new LoopOptimizationCompositePass())
+      .registerCompileTimePass(new DefaultCompositePass());
   private final Policy policy;
 
   /**
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/Policy.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/Policy.java
index 71b78dd..9d4110d 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/Policy.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/Policy.java
@@ -41,7 +41,7 @@
   /**
    * Optimize the DAG with the run-time optimizations.
    *
-   * @param dag input DAG.
+   * @param dag     input DAG.
    * @param message from the DAG execution.
    */
   IRDAG runRunTimeOptimizations(final IRDAG dag, final Message<?> message);
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/PolicyBuilder.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/PolicyBuilder.java
index 69abb6a..22142e2 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/PolicyBuilder.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/PolicyBuilder.java
@@ -65,6 +65,7 @@
 
   /**
    * Register a compile time pass.
+   *
    * @param compileTimePass the compile time pass to register.
    * @return the PolicyBuilder which registers the compileTimePass.
    */
@@ -79,7 +80,7 @@
     // Check prerequisite execution properties.
     if (!annotatedExecutionProperties.containsAll(compileTimePass.getPrerequisiteExecutionProperties())) {
       throw new CompileTimeOptimizationException("Prerequisite ExecutionProperty hasn't been met for "
-          + compileTimePass.getClass().getSimpleName());
+        + compileTimePass.getClass().getSimpleName());
     }
 
     // check annotation of annotating passes.
@@ -95,8 +96,9 @@
 
   /**
    * Register compile time pass with its condition under which to run the pass.
+   *
    * @param compileTimePass the compile time pass to register.
-   * @param condition condition under which to run the pass.
+   * @param condition       condition under which to run the pass.
    * @return the PolicyBuilder which registers the compileTimePass.
    */
   public PolicyBuilder registerCompileTimePass(final CompileTimePass compileTimePass,
@@ -107,7 +109,8 @@
 
   /**
    * Register a run time pass.
-   * @param runTimePass the runtime pass to register.
+   *
+   * @param runTimePass           the runtime pass to register.
    * @param runTimePassRegisterer the compile time pass that triggers the runtime pass.
    * @return the PolicyBuilder which registers the runTimePass and the runTimePassRegisterer.
    */
@@ -120,9 +123,10 @@
 
   /**
    * Register a run time pass.
-   * @param runTimePass the runtime pass to register.
+   *
+   * @param runTimePass           the runtime pass to register.
    * @param runTimePassRegisterer the compile time pass that triggers the runtime pass.
-   * @param condition condition under which to run the pass.
+   * @param condition             condition under which to run the pass.
    * @return the PolicyBuilder which registers the runTimePass and the runTimePassRegisterer.
    */
   public PolicyBuilder registerRunTimePass(final RunTimePass<?> runTimePass,
@@ -134,6 +138,7 @@
 
   /**
    * Getter for compile time passes.
+   *
    * @return the list of compile time passes.
    */
   public List<CompileTimePass> getCompileTimePasses() {
@@ -142,6 +147,7 @@
 
   /**
    * Getter for run time passes.
+   *
    * @return the list of run time passes.
    */
   public Set<RunTimePass<?>> getRunTimePasses() {
@@ -150,6 +156,7 @@
 
   /**
    * Build a policy using compileTimePasses and runTimePasses in this object.
+   *
    * @return the built Policy.
    */
   public Policy build() {
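
The hunk above includes the key invariant of PolicyBuilder: a pass can only be registered once its prerequisite execution properties have been annotated by previously registered passes. A compact sketch of that check, with hypothetical Pass and Builder types:

```java
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Hypothetical stand-in model of the prerequisite check; not the Nemo API.
public class PolicyBuilderSketch {
  record Pass(String name, Set<String> annotates, Set<String> prerequisites) {}

  static class Builder {
    private final Set<String> annotatedProperties = new HashSet<>();
    private final List<Pass> passes = new ArrayList<>();

    Builder register(final Pass pass) {
      if (!annotatedProperties.containsAll(pass.prerequisites())) {
        throw new IllegalStateException(
          "Prerequisite ExecutionProperty hasn't been met for " + pass.name());
      }
      annotatedProperties.addAll(pass.annotates());
      passes.add(pass);
      return this;
    }
  }

  public static void main(String[] args) {
    new Builder()
      .register(new Pass("ParallelismPass", Set.of("Parallelism"), Set.of()))
      .register(new Pass("ScheduleGroupPass", Set.of("ScheduleGroup"), Set.of("Parallelism")));
    System.out.println("registered in a valid order");
    // Reversing the two register calls throws, which is the behavior
    // PolicyBuilderTest.testShouldFailPolicy asserts further down.
  }
}
```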
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/PolicyImpl.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/PolicyImpl.java
index 956e00b..0e8bcb2 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/PolicyImpl.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/PolicyImpl.java
@@ -45,8 +45,9 @@
 
   /**
    * Constructor.
+   *
    * @param compileTimePasses compile time passes of the policy.
-   * @param runTimePasses run time passes of the policy.
+   * @param runTimePasses     run time passes of the policy.
    */
   public PolicyImpl(final List<CompileTimePass> compileTimePasses, final Set<RunTimePass<?>> runTimePasses) {
     this.compileTimePasses = compileTimePasses;
@@ -61,8 +62,9 @@
 
   /**
    * A recursive method to process each pass one-by-one to the given DAG.
-   * @param dag DAG to process.
-   * @param passes passes to apply.
+   *
+   * @param dag          DAG to process.
+   * @param passes       passes to apply.
    * @param dagDirectory directory to save the DAG information.
    * @return the processed DAG.
   * @throws Exception Exceptions along the way.
@@ -116,8 +118,9 @@
   /**
    * Checks if the annotating pass hasn't modified the DAG structure.
   * It checks whether the numbers of Vertices and Edges are the same.
+   *
    * @param before DAG before modification.
-   * @param after DAG after modification.
+   * @param after  DAG after modification.
    * @return true if there is no problem, false if there is a problem.
    */
   private static Boolean checkAnnotatingPass(final IRDAG before, final IRDAG after) {
@@ -159,8 +162,9 @@
   /**
    * Checks if the reshaping pass hasn't modified execution properties.
   * It checks if all of its vertices and edges have the same execution properties as before (if they existed previously).
+   *
    * @param before DAG before modification.
-   * @param after DAG after modification.
+   * @param after  DAG after modification.
    * @return true if there is no problem, false if there is a problem.
    */
   private static Boolean checkReshapingPass(final IRDAG before, final IRDAG after) {
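
checkAnnotatingPass and checkReshapingPass are structural sanity checks: an annotating pass may change properties but not the DAG's shape, so comparing vertex and edge counts before and after catches accidental reshaping. A sketch of the count comparison with a stand-in `Dag` type:

```java
import java.util.List;

// Hypothetical stand-in model; not the Nemo API.
public class PassCheckSketch {
  record Dag(List<String> vertices, List<String> edges) {}

  // Cheap invariant: same number of vertices and edges means the shape is intact.
  static boolean checkAnnotatingPass(final Dag before, final Dag after) {
    return before.vertices().size() == after.vertices().size()
      && before.edges().size() == after.edges().size();
  }

  public static void main(String[] args) {
    final Dag before = new Dag(List.of("v1", "v2"), List.of("e1"));
    final Dag after = new Dag(List.of("v1", "v2"), List.of("e1"));
    System.out.println(checkAnnotatingPass(before, after)); // true: shape preserved.
  }
}
```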
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/SamplingLargeShuffleSkewPolicy.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/SamplingLargeShuffleSkewPolicy.java
index 84d9470..6eac660 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/SamplingLargeShuffleSkewPolicy.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/SamplingLargeShuffleSkewPolicy.java
@@ -20,7 +20,9 @@
 
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.compiler.optimizer.pass.compiletime.annotating.DefaultParallelismPass;
-import org.apache.nemo.compiler.optimizer.pass.compiletime.composite.*;
+import org.apache.nemo.compiler.optimizer.pass.compiletime.composite.DefaultCompositePass;
+import org.apache.nemo.compiler.optimizer.pass.compiletime.composite.LargeShuffleCompositePass;
+import org.apache.nemo.compiler.optimizer.pass.compiletime.composite.LoopOptimizationCompositePass;
 import org.apache.nemo.compiler.optimizer.pass.compiletime.reshaping.SamplingSkewReshapingPass;
 import org.apache.nemo.compiler.optimizer.pass.runtime.Message;
 import org.apache.nemo.compiler.optimizer.pass.runtime.SkewRunTimePass;
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/TransientResourcePolicy.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/TransientResourcePolicy.java
index e6d6a59..e059010 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/TransientResourcePolicy.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/TransientResourcePolicy.java
@@ -29,10 +29,10 @@
  */
 public final class TransientResourcePolicy implements Policy {
   public static final PolicyBuilder BUILDER =
-      new PolicyBuilder()
-          .registerCompileTimePass(new TransientResourceCompositePass())
-          .registerCompileTimePass(new LoopOptimizationCompositePass())
-          .registerCompileTimePass(new DefaultCompositePass());
+    new PolicyBuilder()
+      .registerCompileTimePass(new TransientResourceCompositePass())
+      .registerCompileTimePass(new LoopOptimizationCompositePass())
+      .registerCompileTimePass(new DefaultCompositePass());
   private final Policy policy;
 
   /**
diff --git a/compiler/optimizer/src/test/java/org/apache/nemo/compiler/optimizer/policy/PolicyBuilderTest.java b/compiler/optimizer/src/test/java/org/apache/nemo/compiler/optimizer/policy/PolicyBuilderTest.java
index bfb2654..1e1721a 100644
--- a/compiler/optimizer/src/test/java/org/apache/nemo/compiler/optimizer/policy/PolicyBuilderTest.java
+++ b/compiler/optimizer/src/test/java/org/apache/nemo/compiler/optimizer/policy/PolicyBuilderTest.java
@@ -49,9 +49,9 @@
   public void testShouldFailPolicy() {
     try {
       final Policy failPolicy = new PolicyBuilder()
-          .registerCompileTimePass(new TransientResourceCompositePass())
-          .registerCompileTimePass(new DefaultScheduleGroupPass())
-          .build();
+        .registerCompileTimePass(new TransientResourceCompositePass())
+        .registerCompileTimePass(new DefaultScheduleGroupPass())
+        .build();
     } catch (Exception e) { // throw an exception if default execution properties are not set.
       assertTrue(e instanceof CompileTimeOptimizationException);
       assertTrue(e.getMessage().contains("Prerequisite ExecutionProperty hasn't been met"));
diff --git a/compiler/pom.xml b/compiler/pom.xml
index bdd7fce..ad85ef5 100644
--- a/compiler/pom.xml
+++ b/compiler/pom.xml
@@ -17,7 +17,8 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
   <parent>
     <artifactId>nemo-project</artifactId>
diff --git a/compiler/test/pom.xml b/compiler/test/pom.xml
index c12027f..69f3747 100644
--- a/compiler/test/pom.xml
+++ b/compiler/test/pom.xml
@@ -17,71 +17,72 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-compiler</artifactId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../</relativePath>
-    </parent>
+  <parent>
+    <groupId>org.apache.nemo</groupId>
+    <artifactId>nemo-compiler</artifactId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../</relativePath>
+  </parent>
 
-    <artifactId>nemo-compiler-test</artifactId>
-    <name>Nemo Compiler Test</name>
+  <artifactId>nemo-compiler-test</artifactId>
+  <name>Nemo Compiler Test</name>
 
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-conf</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-client</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-compiler-frontend-beam</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-examples-beam</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <version>${junit.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.mockito</groupId>
-            <artifactId>mockito-core</artifactId>
-            <version>${mockito.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.powermock</groupId>
-            <artifactId>powermock-module-junit4</artifactId>
-            <version>${powermock.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.powermock</groupId>
-            <artifactId>powermock-api-mockito2</artifactId>
-            <version>${powermock.version}</version>
-        </dependency>
-      <dependency>
-        <!--
-        This is needed to view the logs when running unit tests.
-        See https://dzone.com/articles/how-configure-slf4j-different for details.
-        -->
-        <groupId>org.slf4j</groupId>
-        <artifactId>slf4j-simple</artifactId>
-        <version>1.6.2</version>
-        <scope>test</scope>
-      </dependency>
-    </dependencies>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-conf</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-client</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-compiler-frontend-beam</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-examples-beam</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <version>${mockito.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-module-junit4</artifactId>
+      <version>${powermock.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-api-mockito2</artifactId>
+      <version>${powermock.version}</version>
+    </dependency>
+    <dependency>
+      <!--
+      This is needed to view the logs when running unit tests.
+      See https://dzone.com/articles/how-configure-slf4j-different for details.
+      -->
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-simple</artifactId>
+      <version>1.6.2</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
 </project>
diff --git a/compiler/test/src/main/java/org/apache/nemo/compiler/CompilerTestUtil.java b/compiler/test/src/main/java/org/apache/nemo/compiler/CompilerTestUtil.java
index 1ed114a..3f5001a 100644
--- a/compiler/test/src/main/java/org/apache/nemo/compiler/CompilerTestUtil.java
+++ b/compiler/test/src/main/java/org/apache/nemo/compiler/CompilerTestUtil.java
@@ -18,10 +18,10 @@
  */
 package org.apache.nemo.compiler;
 
+import org.apache.nemo.client.JobLauncher;
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.common.test.ArgBuilder;
 import org.apache.nemo.conf.JobConf;
-import org.apache.nemo.client.JobLauncher;
 import org.apache.reef.tang.Configuration;
 import org.apache.reef.tang.Injector;
 import org.apache.reef.tang.Tang;
@@ -45,6 +45,7 @@
   /**
   * Find the root directory of the Nemo project, ascending the directory hierarchy one level at a time starting from {@code curDir}.
   * This method identifies the root by the presence of a "LICENSE" file.
+   *
    * @param curDir the current directory
    * @return the absolute path of the root directory
    */
@@ -77,8 +78,8 @@
       userMainMethodArgs = injector.getNamedInstance(JobConf.UserMainArguments.class).split(" ");
     } catch (final Exception e) {
       throw new RuntimeException("An exception occurred while processing configuration for invoking user main. "
-          + "Note: Using compileDAG for multiple times will fail, as compileDAG method enables static method mocking "
-          + "on JobLauncher and because of this Tang may misbehave afterwards.", e);
+        + "Note: Using compileDAG for multiple times will fail, as compileDAG method enables static method mocking "
+        + "on JobLauncher and because of this Tang may misbehave afterwards.", e);
     }
     final Class userMainClass = Class.forName(userMainClassName);
     final Method userMainMethod = userMainClass.getMethod("main", String[].class);
@@ -97,9 +98,9 @@
     final String main = "org.apache.nemo.examples.beam.WordCount";
 
     final ArgBuilder mrArgBuilder = new ArgBuilder()
-        .addJobId("WordCount")
-        .addUserMain(main)
-        .addUserArgs(input, output);
+      .addJobId("WordCount")
+      .addUserMain(main)
+      .addUserArgs(input, output);
     return compileDAG(mrArgBuilder.build());
   }
 
@@ -110,9 +111,9 @@
     final String main = "org.apache.nemo.examples.beam.AlternatingLeastSquare";
 
     final ArgBuilder alsArgBuilder = new ArgBuilder()
-        .addJobId("AlternatingLeastSquare")
-        .addUserMain(main)
-        .addUserArgs(input, numFeatures, numIteration);
+      .addJobId("AlternatingLeastSquare")
+      .addUserMain(main)
+      .addUserArgs(input, numFeatures, numIteration);
     return compileDAG(alsArgBuilder.build());
   }
 
@@ -123,9 +124,9 @@
     final String main = "org.apache.nemo.examples.beam.AlternatingLeastSquareInefficient";
 
     final ArgBuilder alsArgBuilder = new ArgBuilder()
-        .addJobId("AlternatingLeastSquareInefficient")
-        .addUserMain(main)
-        .addUserArgs(input, numFeatures, numIteration);
+      .addJobId("AlternatingLeastSquareInefficient")
+      .addUserMain(main)
+      .addUserArgs(input, numFeatures, numIteration);
     return compileDAG(alsArgBuilder.build());
   }
 
@@ -137,9 +138,9 @@
     final String main = "org.apache.nemo.examples.beam.MultinomialLogisticRegression";
 
     final ArgBuilder mlrArgBuilder = new ArgBuilder()
-        .addJobId("MultinomialLogisticRegression")
-        .addUserMain(main)
-        .addUserArgs(input, numFeatures, numClasses, numIteration);
+      .addJobId("MultinomialLogisticRegression")
+      .addUserMain(main)
+      .addUserArgs(input, numFeatures, numClasses, numIteration);
     return compileDAG(mlrArgBuilder.build());
   }
 }
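
The javadoc in the hunk above describes how `CompilerTestUtil` locates the project root: ascend the directory hierarchy from `curDir` until a directory containing a "LICENSE" file is found. A minimal standalone sketch of that approach, assuming nothing beyond what the javadoc states (the helper name and error handling are illustrative, not the actual CompilerTestUtil code):

```java
import java.io.File;

final class RootFinder {
  /** Ascends from {@code curDir}, returning the first ancestor containing a "LICENSE" file. */
  static String findRootDirectory(final File curDir) {
    File dir = curDir.getAbsoluteFile();
    while (dir != null) {
      if (new File(dir, "LICENSE").isFile()) {
        return dir.getAbsolutePath(); // found the project root
      }
      dir = dir.getParentFile(); // ascend one level
    }
    throw new RuntimeException("No LICENSE file found up to the filesystem root");
  }
}
```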
diff --git a/compiler/test/src/main/java/org/apache/nemo/compiler/optimizer/policy/TestPolicy.java b/compiler/test/src/main/java/org/apache/nemo/compiler/optimizer/policy/TestPolicy.java
index 6b5b8c5..ec03325 100644
--- a/compiler/test/src/main/java/org/apache/nemo/compiler/optimizer/policy/TestPolicy.java
+++ b/compiler/test/src/main/java/org/apache/nemo/compiler/optimizer/policy/TestPolicy.java
@@ -20,10 +20,13 @@
 
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.compiler.optimizer.pass.compiletime.CompileTimePass;
-import org.apache.nemo.compiler.optimizer.pass.compiletime.annotating.*;
+import org.apache.nemo.compiler.optimizer.pass.compiletime.annotating.DefaultScheduleGroupPass;
+import org.apache.nemo.compiler.optimizer.pass.compiletime.annotating.ShuffleEdgePushPass;
 import org.apache.nemo.compiler.optimizer.pass.runtime.Message;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
 
 /**
  * A policy for tests.
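
The hunks above and below apply two mechanical rules: wildcard imports are expanded into explicit, alphabetically sorted imports, and continuation lines drop from a four-space to a two-space indent. A condensed before/after sketch of the continuation-indent rule, reusing the ArgBuilder chain from CompilerTestUtil (the wrapper class is illustrative, and the assumption that `build()` returns the argument array is inferred from its use in `compileDAG`):

```java
import org.apache.nemo.common.test.ArgBuilder;

final class IndentationExample {
  static String[] wordCountArgs(final String main, final String input, final String output) {
    // Before the patch, chained calls carried a four-space continuation indent:
    //     return new ArgBuilder()
    //         .addJobId("WordCount")
    // After the patch, continuation lines use a two-space indent:
    return new ArgBuilder()
      .addJobId("WordCount")
      .addUserMain(main)
      .addUserArgs(input, output)
      .build();
  }
}
```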
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/backend/nemo/DAGConverterTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/backend/nemo/DAGConverterTest.java
index 5a25c79..58367fb 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/backend/nemo/DAGConverterTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/backend/nemo/DAGConverterTest.java
@@ -25,18 +25,18 @@
 import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.DataFlowProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.DataStoreProperty;
-import org.apache.nemo.common.ir.vertex.SourceVertex;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.vertex.OperatorVertex;
+import org.apache.nemo.common.ir.vertex.SourceVertex;
 import org.apache.nemo.common.ir.vertex.executionproperty.ParallelismProperty;
 import org.apache.nemo.common.ir.vertex.executionproperty.ResourcePriorityProperty;
 import org.apache.nemo.common.ir.vertex.transform.Transform;
 import org.apache.nemo.common.test.EmptyComponents;
+import org.apache.nemo.compiler.optimizer.policy.TestPolicy;
 import org.apache.nemo.conf.JobConf;
 import org.apache.nemo.runtime.common.plan.PhysicalPlanGenerator;
 import org.apache.nemo.runtime.common.plan.Stage;
 import org.apache.nemo.runtime.common.plan.StageEdge;
-import org.apache.nemo.compiler.optimizer.policy.TestPolicy;
 import org.apache.reef.tang.Injector;
 import org.apache.reef.tang.Tang;
 import org.junit.Before;
@@ -77,7 +77,7 @@
     irDAGBuilder.connectVertices(e);
 
     final IRDAG irDAG = new TestPolicy().runCompileTimeOptimization(
-        new IRDAG(irDAGBuilder.buildWithoutSourceSinkCheck()), DAG.EMPTY_DAG_DIRECTORY);
+      new IRDAG(irDAGBuilder.buildWithoutSourceSinkCheck()), DAG.EMPTY_DAG_DIRECTORY);
     final DAG<Stage, StageEdge> DAGOfStages = physicalPlanGenerator.stagePartitionIrDAG(irDAG);
     final DAG<Stage, StageEdge> physicalDAG = physicalPlanGenerator.apply(irDAG);
 
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/BeamFrontendALSTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/BeamFrontendALSTest.java
index 5be2bcb..24cfcc9 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/BeamFrontendALSTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/BeamFrontendALSTest.java
@@ -19,9 +19,7 @@
 package org.apache.nemo.compiler.frontend.beam;
 
 import org.apache.nemo.client.JobLauncher;
-import org.apache.nemo.common.dag.DAG;
 import org.apache.nemo.common.ir.IRDAG;
-import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.compiler.CompilerTestUtil;
 import org.junit.Test;
@@ -44,7 +42,7 @@
     assertEquals(producedDAG.getTopologicalSort(), producedDAG.getTopologicalSort());
     assertEquals(44, producedDAG.getVertices().size());
 
-//    producedDAG.getTopologicalSort().forEach(v -> System.out.println(v.getId()));
+    //    producedDAG.getTopologicalSort().forEach(v -> System.out.println(v.getId()));
     final IRVertex vertexX = producedDAG.getTopologicalSort().get(5);
     assertEquals(1, producedDAG.getIncomingEdgesOf(vertexX).size());
     assertEquals(1, producedDAG.getIncomingEdgesOf(vertexX.getId()).size());
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/BeamFrontendMLRTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/BeamFrontendMLRTest.java
index 231d9ea..d250485 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/BeamFrontendMLRTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/BeamFrontendMLRTest.java
@@ -19,9 +19,7 @@
 package org.apache.nemo.compiler.frontend.beam;
 
 import org.apache.nemo.client.JobLauncher;
-import org.apache.nemo.common.dag.DAG;
 import org.apache.nemo.common.ir.IRDAG;
-import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.compiler.CompilerTestUtil;
 import org.junit.Test;
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/CreateViewTransformTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/CreateViewTransformTest.java
index 702ca9d..c04875d 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/CreateViewTransformTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/CreateViewTransformTest.java
@@ -21,7 +21,6 @@
 import org.apache.beam.sdk.transforms.Materialization;
 import org.apache.beam.sdk.transforms.Materializations;
 import org.apache.beam.sdk.transforms.ViewFn;
-import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.transforms.windowing.FixedWindows;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.util.WindowedValue;
@@ -32,7 +31,7 @@
 import org.joda.time.Instant;
 import org.junit.Test;
 
-import java.util.*;
+import java.util.Arrays;
 
 import static org.junit.Assert.assertEquals;
 import static org.mockito.Mockito.mock;
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/DoFnTransformTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/DoFnTransformTest.java
index fa1169c..f343f4d 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/DoFnTransformTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/DoFnTransformTest.java
@@ -43,9 +43,7 @@
 
 import java.util.*;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
 import static org.mockito.Mockito.mock;
 
 public final class DoFnTransformTest {
@@ -342,6 +340,7 @@
 
   /**
    * Identity do fn.
+   *
    * @param <T> type
    */
   private static class IdentityDoFn<T> extends DoFn<T, T> {
@@ -354,6 +353,7 @@
 
   /**
    * Side input do fn.
+   *
    * @param <T> type
    */
   private static class SimpleSideInputDoFn<T> extends DoFn<T, String> {
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/GroupByKeyAndWindowDoFnTransformTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/GroupByKeyAndWindowDoFnTransformTest.java
index f0749c0..4e51525 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/GroupByKeyAndWindowDoFnTransformTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/GroupByKeyAndWindowDoFnTransformTest.java
@@ -38,9 +38,7 @@
 
 import java.util.*;
 
-import static org.apache.beam.sdk.transforms.windowing.PaneInfo.Timing.EARLY;
-import static org.apache.beam.sdk.transforms.windowing.PaneInfo.Timing.LATE;
-import static org.apache.beam.sdk.transforms.windowing.PaneInfo.Timing.ON_TIME;
+import static org.apache.beam.sdk.transforms.windowing.PaneInfo.Timing.*;
 import static org.apache.beam.sdk.values.WindowingStrategy.AccumulationMode.ACCUMULATING_FIRED_PANES;
 import static org.junit.Assert.assertEquals;
 import static org.mockito.Mockito.mock;
@@ -175,7 +173,7 @@
     doFnTransform.onWatermark(watermark2);
 
     assertEquals(0, oc.outputs.size()); // do not emit anything
-   assertEquals(1, oc.watermarks.size());
+    assertEquals(1, oc.watermarks.size());
 
     // check output watermark
     assertEquals(1400,
@@ -284,7 +282,7 @@
         outputTag,
         WindowingStrategy.of(window).withTrigger(trigger)
           .withMode(ACCUMULATING_FIRED_PANES)
-        .withAllowedLateness(lateness),
+          .withAllowedLateness(lateness),
         PipelineOptionsFactory.as(NemoPipelineOptions.class),
         SystemReduceFn.buffering(NULL_INPUT_CODER),
         DisplayData.none());
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/TestOutputCollector.java b/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/TestOutputCollector.java
index 9abff71..50f64df 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/TestOutputCollector.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/frontend/beam/transform/TestOutputCollector.java
@@ -28,6 +28,7 @@
 
 /**
  * Test output collector that collects data and watermarks.
+ *
  * @param <T>
  */
 final class TestOutputCollector<T> implements OutputCollector<WindowedValue<T>> {
@@ -43,8 +44,8 @@
 
   @Override
   public void emit(WindowedValue<T> output) {
-      outputs.add(output);
-    }
+    outputs.add(output);
+  }
 
   @Override
   public void emitWatermark(Watermark watermark) {
@@ -64,6 +65,6 @@
   }
 
   public List<Tuple<String, WindowedValue<T>>> getTaggedOutputs() {
-      return taggedOutputs;
-    }
+    return taggedOutputs;
+  }
 }
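
For context, `TestOutputCollector` buffers whatever a transform emits so tests can assert on it afterwards, as in the `oc.outputs.size()` and `oc.watermarks.size()` assertions in the GroupByKeyAndWindowDoFnTransformTest hunk above. A hedged usage sketch, assuming a no-argument constructor (the field and method names are the ones visible in this file's hunks):

```java
import org.apache.beam.sdk.util.WindowedValue;

final class TestOutputCollectorUsage {
  static void example() {
    // TestOutputCollector is package-private, so this would live in the same test package.
    final TestOutputCollector<String> oc = new TestOutputCollector<>();
    oc.emit(WindowedValue.valueInGlobalWindow("hello")); // buffer one element
    assert oc.outputs.size() == 1;          // the emitted element was recorded
    assert oc.getTaggedOutputs().isEmpty(); // no tagged outputs were emitted
  }
}
```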
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultEdgeCoderPassTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultEdgeCoderPassTest.java
index 88343af..8626222 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultEdgeCoderPassTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultEdgeCoderPassTest.java
@@ -21,12 +21,10 @@
 import org.apache.nemo.client.JobLauncher;
 import org.apache.nemo.common.coder.DecoderFactory;
 import org.apache.nemo.common.coder.EncoderFactory;
-import org.apache.nemo.common.dag.DAG;
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.edge.executionproperty.DecoderProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.EncoderProperty;
-import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.compiler.CompilerTestUtil;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultParallelismPassTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultParallelismPassTest.java
index b92df25..ae2d2e6 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultParallelismPassTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultParallelismPassTest.java
@@ -56,7 +56,7 @@
     final IRDAG processedDAG = new DefaultParallelismPass().apply(compiledDAG);
 
     processedDAG.getTopologicalSort().forEach(irVertex ->
-        assertEquals(1, irVertex.getPropertyValue(ParallelismProperty.class).get().longValue()));
+      assertEquals(1, irVertex.getPropertyValue(ParallelismProperty.class).get().longValue()));
   }
 
   @Test
@@ -65,8 +65,8 @@
     final IRDAG processedDAG = new DefaultParallelismPass(desiredSourceParallelism, 2).apply(compiledDAG);
 
     processedDAG.getTopologicalSort().stream()
-        .filter(irVertex -> irVertex instanceof SourceVertex)
-        .forEach(irVertex -> assertEquals(desiredSourceParallelism,
-            irVertex.getPropertyValue(ParallelismProperty.class).get().longValue()));
+      .filter(irVertex -> irVertex instanceof SourceVertex)
+      .forEach(irVertex -> assertEquals(desiredSourceParallelism,
+        irVertex.getPropertyValue(ParallelismProperty.class).get().longValue()));
   }
 }
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultScheduleGroupPassTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultScheduleGroupPassTest.java
index e5b4b10..cd28467 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultScheduleGroupPassTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/annotating/DefaultScheduleGroupPassTest.java
@@ -29,8 +29,6 @@
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.vertex.OperatorVertex;
 import org.apache.nemo.common.ir.vertex.executionproperty.ScheduleGroupProperty;
-import org.apache.nemo.compiler.CompilerTestUtil;
-import org.apache.nemo.compiler.optimizer.policy.TestPolicy;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.powermock.core.classloader.annotations.PrepareForTest;
@@ -39,9 +37,7 @@
 import java.util.*;
 
 import static org.apache.nemo.common.test.EmptyComponents.EMPTY_TRANSFORM;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
 
 /**
  * Test {@link DefaultScheduleGroupPass}.
@@ -58,17 +54,17 @@
   /**
    * Return a DAG that has a branch.
    * {@literal
-   *           /-- v3 --- v4
+   * /-- v3 --- v4
    * v0 --- v1 --- v2 --/
    * }
    *
    * @param communicationPattern {@link CommunicationPatternProperty.Value} for the edges
-   * @param dataFlowModel {@link DataFlowProperty.Value} for the edges
+   * @param dataFlowModel        {@link DataFlowProperty.Value} for the edges
    * @return a {@link Pair} of {@link DAG} and {@link List} of {@link IRVertex}
    */
   private static Pair<IRDAG, List<IRVertex>> generateBranchDAG(
-      final CommunicationPatternProperty.Value communicationPattern,
-      final DataFlowProperty.Value dataFlowModel) {
+    final CommunicationPatternProperty.Value communicationPattern,
+    final DataFlowProperty.Value dataFlowModel) {
     final DAGBuilder<IRVertex, IREdge> dagBuilder = new DAGBuilder<>();
 
     final IRVertex v0 = new OperatorVertex(EMPTY_TRANSFORM);
@@ -102,12 +98,12 @@
    * }
    *
    * @param communicationPattern {@link CommunicationPatternProperty.Value} for the edges
-   * @param dataFlowModel {@link DataFlowProperty.Value} for the edges
+   * @param dataFlowModel        {@link DataFlowProperty.Value} for the edges
    * @return a {@link Pair} of {@link DAG} and {@link List} of {@link IRVertex}
    */
   private static Pair<IRDAG, List<IRVertex>> generateJoinDAG(
-      final CommunicationPatternProperty.Value communicationPattern,
-      final DataFlowProperty.Value dataFlowModel) {
+    final CommunicationPatternProperty.Value communicationPattern,
+    final DataFlowProperty.Value dataFlowModel) {
     final DAGBuilder<IRVertex, IREdge> dagBuilder = new DAGBuilder<>();
 
     final IRVertex v0 = new OperatorVertex(EMPTY_TRANSFORM);
@@ -136,8 +132,9 @@
 
   /**
    * Asserts that the {@link ScheduleGroupProperty} is equal to {@code expected}.
+   *
    * @param expected the expected property value
-   * @param vertex the vertex to test
+   * @param vertex   the vertex to test
    */
   private static void assertScheduleGroup(final int expected, final IRVertex vertex) {
     assertEquals(expected, getScheduleGroup(vertex));
@@ -149,11 +146,12 @@
    */
   private static int getScheduleGroup(final IRVertex vertex) {
     return vertex.getPropertyValue(ScheduleGroupProperty.class)
-        .orElseThrow(() -> new RuntimeException(String.format("ScheduleGroup not set for %s", vertex.getId())));
+      .orElseThrow(() -> new RuntimeException(String.format("ScheduleGroup not set for %s", vertex.getId())));
   }
 
   /**
    * Ensures that all vertices in {@code vertices} have different {@link ScheduleGroupProperty} values.
+   *
    * @param vertices vertices to test
    */
   private static void assertDifferentScheduleGroup(final Collection<IRVertex> vertices) {
@@ -173,7 +171,7 @@
   public void testBranch() {
     final DefaultScheduleGroupPass pass = new DefaultScheduleGroupPass();
     final Pair<IRDAG, List<IRVertex>> dag
-        = generateBranchDAG(CommunicationPatternProperty.Value.OneToOne, DataFlowProperty.Value.Push);
+      = generateBranchDAG(CommunicationPatternProperty.Value.OneToOne, DataFlowProperty.Value.Push);
     pass.apply(dag.left());
     dag.right().forEach(v -> assertScheduleGroup(0, v));
   }
@@ -186,7 +184,7 @@
   public void testBranchWhenMultipleInEdgeNotAllowed() {
     final DefaultScheduleGroupPass pass = new DefaultScheduleGroupPass(false, false, false);
     final Pair<IRDAG, List<IRVertex>> dag
-        = generateBranchDAG(CommunicationPatternProperty.Value.OneToOne, DataFlowProperty.Value.Pull);
+      = generateBranchDAG(CommunicationPatternProperty.Value.OneToOne, DataFlowProperty.Value.Pull);
     pass.apply(dag.left());
     dag.right().subList(0, 4).forEach(v -> assertScheduleGroup(0, v));
     dag.right().subList(4, 5).forEach(v -> assertScheduleGroup(1, v));
@@ -200,7 +198,7 @@
   public void testBranchWithPush() {
     final DefaultScheduleGroupPass pass = new DefaultScheduleGroupPass(false, false, false);
     final Pair<IRDAG, List<IRVertex>> dag
-        = generateBranchDAG(CommunicationPatternProperty.Value.Shuffle, DataFlowProperty.Value.Push);
+      = generateBranchDAG(CommunicationPatternProperty.Value.Shuffle, DataFlowProperty.Value.Push);
     pass.apply(dag.left());
     dag.right().forEach(v -> assertScheduleGroup(0, v));
   }
@@ -213,7 +211,7 @@
   public void testBranchWithBroadcast() {
     final DefaultScheduleGroupPass pass = new DefaultScheduleGroupPass(false, true, true);
     final Pair<IRDAG, List<IRVertex>> dag
-        = generateBranchDAG(CommunicationPatternProperty.Value.BroadCast, DataFlowProperty.Value.Pull);
+      = generateBranchDAG(CommunicationPatternProperty.Value.BroadCast, DataFlowProperty.Value.Pull);
     assertDifferentScheduleGroup(pass.apply(dag.left()).getVertices());
   }
 
@@ -225,7 +223,7 @@
   public void testBranchWithShuffle() {
     final DefaultScheduleGroupPass pass = new DefaultScheduleGroupPass(true, false, true);
     final Pair<IRDAG, List<IRVertex>> dag
-        = generateBranchDAG(CommunicationPatternProperty.Value.Shuffle, DataFlowProperty.Value.Pull);
+      = generateBranchDAG(CommunicationPatternProperty.Value.Shuffle, DataFlowProperty.Value.Pull);
     assertDifferentScheduleGroup(pass.apply(dag.left()).getVertices());
   }
 
@@ -237,7 +235,7 @@
   public void testJoin() {
     final DefaultScheduleGroupPass pass = new DefaultScheduleGroupPass();
     final Pair<IRDAG, List<IRVertex>> dag
-        = generateJoinDAG(CommunicationPatternProperty.Value.OneToOne, DataFlowProperty.Value.Pull);
+      = generateJoinDAG(CommunicationPatternProperty.Value.OneToOne, DataFlowProperty.Value.Pull);
     pass.apply(dag.left());
     final int idxForFirstScheduleGroup = getScheduleGroup(dag.right().get(0));
     final int idxForSecondScheduleGroup = getScheduleGroup(dag.right().get(2));
@@ -254,7 +252,7 @@
   public void testJoinWithPush() {
     final DefaultScheduleGroupPass pass = new DefaultScheduleGroupPass();
     final Pair<IRDAG, List<IRVertex>> dag
-        = generateJoinDAG(CommunicationPatternProperty.Value.OneToOne, DataFlowProperty.Value.Push);
+      = generateJoinDAG(CommunicationPatternProperty.Value.OneToOne, DataFlowProperty.Value.Push);
     pass.apply(dag.left());
     dag.right().forEach(v -> assertScheduleGroup(0, v));
   }
@@ -267,9 +265,9 @@
   public void testJoinWithSinglePush() {
     final DefaultScheduleGroupPass pass = new DefaultScheduleGroupPass();
     final Pair<IRDAG, List<IRVertex>> dag
-        = generateJoinDAG(CommunicationPatternProperty.Value.OneToOne, DataFlowProperty.Value.Push);
+      = generateJoinDAG(CommunicationPatternProperty.Value.OneToOne, DataFlowProperty.Value.Push);
     dag.left().getOutgoingEdgesOf(dag.right().get(1)).iterator().next()
-        .setProperty(DataFlowProperty.of(DataFlowProperty.Value.Pull));
+      .setProperty(DataFlowProperty.of(DataFlowProperty.Value.Pull));
     pass.apply(dag.left());
     final int idxForFirstScheduleGroup = getScheduleGroup(dag.right().get(0));
     final int idxForSecondScheduleGroup = getScheduleGroup(dag.right().get(2));
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/DisaggregationPassTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/DisaggregationPassTest.java
index 139bfd4..ada485f 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/DisaggregationPassTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/DisaggregationPassTest.java
@@ -22,9 +22,9 @@
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.common.ir.edge.executionproperty.DataStoreProperty;
 import org.apache.nemo.compiler.CompilerTestUtil;
+import org.apache.nemo.compiler.optimizer.pass.compiletime.annotating.DefaultDataStorePass;
 import org.apache.nemo.compiler.optimizer.pass.compiletime.annotating.DefaultParallelismPass;
 import org.apache.nemo.compiler.optimizer.pass.compiletime.annotating.DisaggregationEdgeDataStorePass;
-import org.apache.nemo.compiler.optimizer.pass.compiletime.annotating.DefaultDataStorePass;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -49,13 +49,13 @@
   @Test
   public void testDisaggregation() throws Exception {
     final IRDAG processedDAG =
-        new DisaggregationEdgeDataStorePass().apply(
-            new DefaultDataStorePass().apply(
-                  new DefaultParallelismPass().apply(compiledDAG)));
+      new DisaggregationEdgeDataStorePass().apply(
+        new DefaultDataStorePass().apply(
+          new DefaultParallelismPass().apply(compiledDAG)));
 
     processedDAG.getTopologicalSort().forEach(irVertex ->
       processedDAG.getIncomingEdgesOf(irVertex).forEach(edgeToMerger ->
-          assertEquals(DataStoreProperty.Value.GlusterFileStore,
-              edgeToMerger.getPropertyValue(DataStoreProperty.class).get())));
+        assertEquals(DataStoreProperty.Value.GlusterFileStore,
+          edgeToMerger.getPropertyValue(DataStoreProperty.class).get())));
   }
 }
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/LargeShuffleCompositePassTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/LargeShuffleCompositePassTest.java
index 2867e23..202c4b7 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/LargeShuffleCompositePassTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/LargeShuffleCompositePassTest.java
@@ -51,44 +51,44 @@
 
     processedDAG.getTopologicalSort().forEach(irVertex -> {
       if (processedDAG.getIncomingEdgesOf(irVertex).stream().anyMatch(irEdge ->
-              CommunicationPatternProperty.Value.Shuffle
+        CommunicationPatternProperty.Value.Shuffle
           .equals(irEdge.getPropertyValue(CommunicationPatternProperty.class).get()))) {
         // Relay vertex
         processedDAG.getIncomingEdgesOf(irVertex).forEach(edgeToMerger -> {
           if (CommunicationPatternProperty.Value.Shuffle
-          .equals(edgeToMerger.getPropertyValue(CommunicationPatternProperty.class).get())) {
+            .equals(edgeToMerger.getPropertyValue(CommunicationPatternProperty.class).get())) {
             assertEquals(DataFlowProperty.Value.Push,
-                edgeToMerger.getPropertyValue(DataFlowProperty.class).get());
+              edgeToMerger.getPropertyValue(DataFlowProperty.class).get());
             assertEquals(DataPersistenceProperty.Value.Discard,
-                edgeToMerger.getPropertyValue(DataPersistenceProperty.class).get());
+              edgeToMerger.getPropertyValue(DataPersistenceProperty.class).get());
             assertEquals(DataStoreProperty.Value.SerializedMemoryStore,
-                edgeToMerger.getPropertyValue(DataStoreProperty.class).get());
+              edgeToMerger.getPropertyValue(DataStoreProperty.class).get());
             assertEquals(BytesDecoderFactory.of(),
-                edgeToMerger.getPropertyValue(DecoderProperty.class).get());
+              edgeToMerger.getPropertyValue(DecoderProperty.class).get());
             assertEquals(CompressionProperty.Value.LZ4,
-                edgeToMerger.getPropertyValue(CompressionProperty.class).get());
+              edgeToMerger.getPropertyValue(CompressionProperty.class).get());
             assertEquals(CompressionProperty.Value.None,
-                edgeToMerger.getPropertyValue(DecompressionProperty.class).get());
+              edgeToMerger.getPropertyValue(DecompressionProperty.class).get());
           } else {
             assertEquals(DataFlowProperty.Value.Pull,
-                edgeToMerger.getPropertyValue(DataFlowProperty.class).get());
+              edgeToMerger.getPropertyValue(DataFlowProperty.class).get());
           }
         });
         processedDAG.getOutgoingEdgesOf(irVertex).forEach(edgeFromMerger -> {
           assertEquals(DataFlowProperty.Value.Pull,
-              edgeFromMerger.getPropertyValue(DataFlowProperty.class).get());
+            edgeFromMerger.getPropertyValue(DataFlowProperty.class).get());
           assertEquals(CommunicationPatternProperty.Value.OneToOne,
-              edgeFromMerger.getPropertyValue(CommunicationPatternProperty.class).get());
+            edgeFromMerger.getPropertyValue(CommunicationPatternProperty.class).get());
           assertEquals(DataStoreProperty.Value.LocalFileStore,
-              edgeFromMerger.getPropertyValue(DataStoreProperty.class).get());
+            edgeFromMerger.getPropertyValue(DataStoreProperty.class).get());
           assertEquals(BytesEncoderFactory.of(),
-              edgeFromMerger.getPropertyValue(EncoderProperty.class).get());
+            edgeFromMerger.getPropertyValue(EncoderProperty.class).get());
           assertEquals(PartitionerProperty.Type.DedicatedKeyPerElement,
-              edgeFromMerger.getPropertyValue(PartitionerProperty.class).get().left());
+            edgeFromMerger.getPropertyValue(PartitionerProperty.class).get().left());
           assertEquals(CompressionProperty.Value.None,
-              edgeFromMerger.getPropertyValue(CompressionProperty.class).get());
+            edgeFromMerger.getPropertyValue(CompressionProperty.class).get());
           assertEquals(CompressionProperty.Value.LZ4,
-              edgeFromMerger.getPropertyValue(DecompressionProperty.class).get());
+            edgeFromMerger.getPropertyValue(DecompressionProperty.class).get());
         });
       }
     });
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/SkewCompositePassTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/SkewCompositePassTest.java
index 03e8d29..c2d51d1 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/SkewCompositePassTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/SkewCompositePassTest.java
@@ -21,8 +21,8 @@
 import org.apache.nemo.client.JobLauncher;
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
-import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.executionproperty.ExecutionProperty;
+import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.vertex.OperatorVertex;
 import org.apache.nemo.common.ir.vertex.executionproperty.ResourceAntiAffinityProperty;
 import org.apache.nemo.common.ir.vertex.transform.MessageAggregatorTransform;
@@ -63,7 +63,7 @@
     final CompositePass dataSkewPass = new SkewCompositePass();
     final Set<Class<? extends ExecutionProperty>> prerequisites = new HashSet<>();
     dataSkewPass.getPassList().forEach(compileTimePass ->
-        prerequisites.addAll(compileTimePass.getPrerequisiteExecutionProperties()));
+      prerequisites.addAll(compileTimePass.getPrerequisiteExecutionProperties()));
     dataSkewPass.getPassList().forEach(compileTimePass -> {
       if (compileTimePass instanceof AnnotatingPass) {
         prerequisites.removeAll(((AnnotatingPass) compileTimePass).getExecutionPropertiesToAnnotate());
@@ -76,6 +76,7 @@
    * Test for {@link SkewCompositePass} with MR workload.
    * It should have inserted a vertex with {@link MessageBarrierTransform}
    * and a vertex with {@link MessageAggregatorTransform} for each shuffle edge.
+   *
    * @throws Exception exception thrown along the way.
    */
   @Test
@@ -88,7 +89,7 @@
         mrDAG.getIncomingEdgesOf(irVertex).stream().anyMatch(irEdge ->
           CommunicationPatternProperty.Value.Shuffle
             .equals(irEdge.getPropertyValue(CommunicationPatternProperty.class).get())))
-      .count();
+        .count();
 
     final IRDAG processedDAG = new SkewCompositePass().apply(new DefaultParallelismPass().apply(mrDAG));
     assertEquals(originalVerticesNum + numOfShuffleEdges * 2, processedDAG.getVertices().size());
@@ -96,13 +97,13 @@
     processedDAG.filterVertices(v -> v instanceof OperatorVertex
       && ((OperatorVertex) v).getTransform() instanceof MessageBarrierTransform)
       .forEach(metricV -> {
-          final List<IRVertex> reducerV = processedDAG.getChildren(metricV.getId());
-          reducerV.forEach(rV -> {
-            if (rV instanceof OperatorVertex &&
-              !(((OperatorVertex) rV).getTransform() instanceof MessageAggregatorTransform)) {
-              assertTrue(rV.getPropertyValue(ResourceAntiAffinityProperty.class).isPresent());
-            }
-          });
+        final List<IRVertex> reducerV = processedDAG.getChildren(metricV.getId());
+        reducerV.forEach(rV -> {
+          if (rV instanceof OperatorVertex &&
+            !(((OperatorVertex) rV).getTransform() instanceof MessageAggregatorTransform)) {
+            assertTrue(rV.getPropertyValue(ResourceAntiAffinityProperty.class).isPresent());
+          }
+        });
       });
   }
 }
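
The javadoc above explains why the test expects `originalVerticesNum + numOfShuffleEdges * 2` vertices: the pass inserts one MessageBarrierTransform vertex and one MessageAggregatorTransform vertex per shuffle edge. A worked instance of that arithmetic (the counts are hypothetical, not measured from the actual WordCount DAG):

```java
final class SkewSizeArithmetic {
  static long expectedVertexCount() {
    final long originalVerticesNum = 8; // hypothetical DAG size
    final long numOfShuffleEdges = 2;   // hypothetical shuffle-edge count
    // One barrier vertex plus one aggregator vertex per shuffle edge:
    return originalVerticesNum + numOfShuffleEdges * 2; // 8 + 4 = 12
  }
}
```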
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/TransientResourceCompositePassTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/TransientResourceCompositePassTest.java
index d4f2b42..899053a 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/TransientResourceCompositePassTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/composite/TransientResourceCompositePassTest.java
@@ -19,9 +19,7 @@
 package org.apache.nemo.compiler.optimizer.pass.compiletime.composite;
 
 import org.apache.nemo.client.JobLauncher;
-import org.apache.nemo.common.dag.DAG;
 import org.apache.nemo.common.ir.IRDAG;
-import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.edge.executionproperty.DataFlowProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.DataStoreProperty;
 import org.apache.nemo.common.ir.vertex.IRVertex;
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/CommonSubexpressionEliminationPassTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/CommonSubexpressionEliminationPassTest.java
index 83f7787..7510505 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/CommonSubexpressionEliminationPassTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/CommonSubexpressionEliminationPassTest.java
@@ -19,13 +19,12 @@
 package org.apache.nemo.compiler.optimizer.pass.compiletime.reshaping;
 
 import org.apache.nemo.client.JobLauncher;
-import org.apache.nemo.common.dag.DAG;
+import org.apache.nemo.common.dag.DAGBuilder;
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.vertex.OperatorVertex;
-import org.apache.nemo.common.dag.DAGBuilder;
 import org.apache.nemo.common.test.EmptyComponents;
 import org.junit.Before;
 import org.junit.Test;
@@ -59,18 +58,18 @@
   public void setUp() {
     final DAGBuilder<IRVertex, IREdge> dagBuilder = new DAGBuilder<>();
     dagNotToOptimize = new IRDAG(dagBuilder.addVertex(source).addVertex(map1).addVertex(groupByKey).addVertex(combine)
-        .addVertex(map2)
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, source, map1))
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.Shuffle, map1, groupByKey))
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, groupByKey, combine))
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, combine, map2))
-        .build());
+      .addVertex(map2)
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, source, map1))
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.Shuffle, map1, groupByKey))
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, groupByKey, combine))
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, combine, map2))
+      .build());
     dagToOptimize = new IRDAG(dagBuilder.addVertex(map1clone).addVertex(groupByKey2).addVertex(combine2).addVertex(map22)
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, source, map1clone))
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.Shuffle, map1clone, groupByKey2))
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, groupByKey2, combine2))
-        .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, combine2, map22))
-        .build());
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, source, map1clone))
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.Shuffle, map1clone, groupByKey2))
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, groupByKey2, combine2))
+      .connectVertices(new IREdge(CommunicationPatternProperty.Value.OneToOne, combine2, map22))
+      .build());
   }
 
   @Test
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopExtractionPassTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopExtractionPassTest.java
index d6b0768..b620a59 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopExtractionPassTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopExtractionPassTest.java
@@ -19,10 +19,7 @@
 package org.apache.nemo.compiler.optimizer.pass.compiletime.reshaping;
 
 import org.apache.nemo.client.JobLauncher;
-import org.apache.nemo.common.dag.DAG;
 import org.apache.nemo.common.ir.IRDAG;
-import org.apache.nemo.common.ir.edge.IREdge;
-import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.compiler.CompilerTestUtil;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopInvariantCodeMotionALSInefficientTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopInvariantCodeMotionALSInefficientTest.java
index 8c953fc..710d490 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopInvariantCodeMotionALSInefficientTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopInvariantCodeMotionALSInefficientTest.java
@@ -19,10 +19,7 @@
 package org.apache.nemo.compiler.optimizer.pass.compiletime.reshaping;
 
 import org.apache.nemo.client.JobLauncher;
-import org.apache.nemo.common.dag.DAG;
 import org.apache.nemo.common.ir.IRDAG;
-import org.apache.nemo.common.ir.edge.IREdge;
-import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.compiler.CompilerTestUtil;
 import org.junit.Before;
 import org.junit.Test;
@@ -52,7 +49,7 @@
     final long expectedNumOfVertices = groupedDAG.getVertices().size() + 3;
 
     final IRDAG processedDAG = LoopOptimizations.getLoopInvariantCodeMotionPass()
-        .apply(groupedDAG);
+      .apply(groupedDAG);
     assertEquals(expectedNumOfVertices, processedDAG.getVertices().size());
   }
 
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopInvariantCodeMotionPassTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopInvariantCodeMotionPassTest.java
index db2e7c5..6dd5123 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopInvariantCodeMotionPassTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopInvariantCodeMotionPassTest.java
@@ -19,7 +19,6 @@
 package org.apache.nemo.compiler.optimizer.pass.compiletime.reshaping;
 
 import org.apache.nemo.client.JobLauncher;
-import org.apache.nemo.common.dag.DAG;
 import org.apache.nemo.common.dag.DAGBuilder;
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.common.ir.edge.IREdge;
@@ -59,7 +58,7 @@
     groupedDAG = new LoopExtractionPass().apply(originalALSDAG);
 
     final Optional<LoopVertex> alsLoopOpt = groupedDAG.getTopologicalSort().stream()
-        .filter(irVertex -> irVertex instanceof LoopVertex).map(irVertex -> (LoopVertex) irVertex).findFirst();
+      .filter(irVertex -> irVertex instanceof LoopVertex).map(irVertex -> (LoopVertex) irVertex).findFirst();
     assertTrue(alsLoopOpt.isPresent());
     final LoopVertex alsLoop = alsLoopOpt.get();
 
@@ -94,7 +93,7 @@
             final Optional<IREdge> incomingEdge = newDAGIncomingEdge.stream().findFirst();
             assertTrue(incomingEdge.isPresent());
             final IREdge newIREdge = new IREdge(incomingEdge.get().getPropertyValue(
-                CommunicationPatternProperty.class).get(), incomingEdge.get().getSrc(), alsLoop);
+              CommunicationPatternProperty.class).get(), incomingEdge.get().getSrc(), alsLoop);
             newIREdge.setProperty(EncoderProperty.of(incomingEdge.get().getPropertyValue(EncoderProperty.class).get()));
             newIREdge.setProperty(DecoderProperty.of(incomingEdge.get().getPropertyValue(DecoderProperty.class).get()));
             builder.connectVertices(newIREdge);
@@ -111,11 +110,11 @@
     final long numberOfGroupedVertices = groupedDAG.getVertices().size();
 
     final IRDAG processedDAG = LoopOptimizations.getLoopInvariantCodeMotionPass()
-        .apply(dagToBeRefactored);
+      .apply(dagToBeRefactored);
     assertEquals(numberOfGroupedVertices, processedDAG.getVertices().size());
 
     final IRDAG notProcessedDAG = LoopOptimizations.getLoopInvariantCodeMotionPass()
-        .apply(groupedDAG);
+      .apply(groupedDAG);
     assertEquals(numberOfGroupedVertices, notProcessedDAG.getVertices().size());
   }
 
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopUnrollingPassTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopUnrollingPassTest.java
index 5c93ce5..1711213 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopUnrollingPassTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/compiletime/reshaping/LoopUnrollingPassTest.java
@@ -20,9 +20,7 @@
 
 import org.apache.nemo.client.JobLauncher;
 import org.apache.nemo.common.Pair;
-import org.apache.nemo.common.dag.DAG;
 import org.apache.nemo.common.ir.IRDAG;
-import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.compiler.CompilerTestUtil;
 import org.junit.Before;
@@ -53,20 +51,20 @@
   @Test
   public void testLoopUnrollingPass() throws Exception {
     final IRDAG processedDAG =
-        new LoopUnrollingPass().apply(new LoopExtractionPass().apply(compiledDAG));
+      new LoopUnrollingPass().apply(new LoopExtractionPass().apply(compiledDAG));
 
     assertEquals(compiledDAG.getTopologicalSort().size(), processedDAG.getTopologicalSort().size());
     // zip vertices
     final Iterator<IRVertex> vertices1 = compiledDAG.getTopologicalSort().iterator();
     final Iterator<IRVertex> vertices2 = processedDAG.getTopologicalSort().iterator();
     final List<Pair<IRVertex, IRVertex>> list = new ArrayList<>();
-    while  (vertices1.hasNext() && vertices2.hasNext()) {
+    while (vertices1.hasNext() && vertices2.hasNext()) {
       list.add(Pair.of(vertices1.next(), vertices2.next()));
     }
     list.forEach(irVertexPair -> {
-        assertEquals(irVertexPair.left().getExecutionProperties(), irVertexPair.right().getExecutionProperties());
-        assertEquals(compiledDAG.getIncomingEdgesOf(irVertexPair.left()).size(),
-            processedDAG.getIncomingEdgesOf(irVertexPair.right()).size());
+      assertEquals(irVertexPair.left().getExecutionProperties(), irVertexPair.right().getExecutionProperties());
+      assertEquals(compiledDAG.getIncomingEdgesOf(irVertexPair.left()).size(),
+        processedDAG.getIncomingEdgesOf(irVertexPair.right()).size());
     });
   }
 }
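
The `testLoopUnrollingPass` hunk compares two DAGs vertex by vertex using a hand-rolled zip over two iterators. The generic form of that idiom, extracted into a standalone sketch (the wrapper class is illustrative; `Pair.of` is the factory used in the hunk):

```java
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.nemo.common.Pair;

final class ZipExample {
  /** Pairs up elements of two lists positionally until the shorter list is exhausted. */
  static <A, B> List<Pair<A, B>> zip(final List<A> as, final List<B> bs) {
    final Iterator<A> ia = as.iterator();
    final Iterator<B> ib = bs.iterator();
    final List<Pair<A, B>> zipped = new ArrayList<>();
    while (ia.hasNext() && ib.hasNext()) {
      zipped.add(Pair.of(ia.next(), ib.next()));
    }
    return zipped;
  }
}
```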
diff --git a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/runtime/SkewRuntimePassTest.java b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/runtime/SkewRuntimePassTest.java
index de00e16..81689f6 100644
--- a/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/runtime/SkewRuntimePassTest.java
+++ b/compiler/test/src/test/java/org/apache/nemo/compiler/optimizer/pass/runtime/SkewRuntimePassTest.java
@@ -18,13 +18,12 @@
  */
 package org.apache.nemo.compiler.optimizer.pass.runtime;
 
-import org.apache.nemo.common.KeyRange;
 import org.apache.nemo.common.KeyExtractor;
+import org.apache.nemo.common.KeyRange;
 import org.apache.nemo.common.Pair;
 import org.apache.nemo.common.ir.edge.executionproperty.PartitionSetProperty;
 import org.apache.nemo.common.ir.vertex.executionproperty.ResourceAntiAffinityProperty;
 import org.apache.nemo.common.partitioner.HashPartitioner;
-import org.apache.nemo.common.partitioner.Partitioner;
 import org.junit.Before;
 import org.junit.Test;
 
diff --git a/conf/pom.xml b/conf/pom.xml
index d36f615..c570d6d 100644
--- a/conf/pom.xml
+++ b/conf/pom.xml
@@ -17,24 +17,25 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-project</artifactId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../</relativePath>
-    </parent>
+  <parent>
+    <groupId>org.apache.nemo</groupId>
+    <artifactId>nemo-project</artifactId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../</relativePath>
+  </parent>
 
-    <artifactId>nemo-conf</artifactId>
-    <name>Nemo Job Configuration</name>
+  <artifactId>nemo-conf</artifactId>
+  <name>Nemo Job Configuration</name>
 
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.reef</groupId>
-            <artifactId>reef-common</artifactId>
-            <version>${reef.version}</version>
-        </dependency>
-    </dependencies>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.reef</groupId>
+      <artifactId>reef-common</artifactId>
+      <version>${reef.version}</version>
+    </dependency>
+  </dependencies>
 </project>
diff --git a/conf/src/main/java/org/apache/nemo/conf/JobConf.java b/conf/src/main/java/org/apache/nemo/conf/JobConf.java
index c0df1a6..684f48c 100644
--- a/conf/src/main/java/org/apache/nemo/conf/JobConf.java
+++ b/conf/src/main/java/org/apache/nemo/conf/JobConf.java
@@ -120,7 +120,7 @@
    * The name of the optimization policy.
    */
   @NamedParameter(doc = "The canonical name of the optimization policy", short_name = "optimization_policy",
-      default_value = "org.apache.nemo.compiler.optimizer.policy.DefaultPolicy")
+    default_value = "org.apache.nemo.compiler.optimizer.policy.DefaultPolicy")
   public final class OptimizationPolicy implements Name<String> {
   }
 
@@ -137,7 +137,7 @@
    * The fraction of container memory not to use for the JVM heap.
    */
   @NamedParameter(doc = "The fraction of the container memory not to use for the JVM heap", short_name = "heap_slack",
-      default_value = "0.3")
+    default_value = "0.3")
   public final class JVMHeapSlack implements Name<Double> {
   }
 
@@ -171,7 +171,7 @@
    * Used for fault-injected tests.
    */
   @NamedParameter(doc = "Executor crashes after expected time, does not crash when -1",
-      short_name = "executor_poison_sec", default_value = "-1")
+    short_name = "executor_poison_sec", default_value = "-1")
   public final class ExecutorPosionSec implements Name<Integer> {
   }
 
@@ -179,7 +179,7 @@
    * Path to the JSON file that specifies bandwidth between locations.
    */
   @NamedParameter(doc = "Path to the JSON file that specifies bandwidth between locations",
-      short_name = "bandwidth_json", default_value = "")
+    short_name = "bandwidth_json", default_value = "")
   public final class BandwidthJSONPath implements Name<String> {
   }
 
@@ -187,7 +187,7 @@
    * Path to the JSON file that specifies resource layout.
    */
   @NamedParameter(doc = "Path to the JSON file that specifies resources for executors", short_name = "executor_json",
-      default_value = "")
+    default_value = "")
   public final class ExecutorJSONPath implements Name<String> {
   }
 
@@ -212,7 +212,7 @@
    * Number of I/O threads for block fetch requests from other executors.
    */
   @NamedParameter(doc = "Number of I/O threads for block fetch request.", short_name = "io_request_threads",
-      default_value = "5")
+    default_value = "5")
   public final class IORequestHandleThreadsTotal implements Name<Integer> {
   }
 
@@ -220,7 +220,7 @@
    * Maximum number of parallel downloads for a runtime edge.
    */
   @NamedParameter(doc = "Maximum number of parallel downloads for a runtime edge.", short_name = "max_downloads",
-      default_value = "30")
+    default_value = "30")
   public final class MaxNumDownloadsForARuntimeEdge implements Name<Integer> {
   }
 
@@ -228,7 +228,7 @@
    * The number of serialization threads for scheduling.
    */
   @NamedParameter(doc = "Number of serialization thread for scheduling", short_name = "schedule_ser_thread",
-      default_value = "8")
+    default_value = "8")
   public final class ScheduleSerThread implements Name<Integer> {
   }
 
@@ -236,7 +236,7 @@
    * The TCP port to which local block transfer binds. 0 means random port.
    */
   @NamedParameter(doc = "Port to which PartitionTransport binds (0 means random port)",
-      short_name = "block_port", default_value = "0")
+    short_name = "block_port", default_value = "0")
   public final class PartitionTransportServerPort implements Name<Integer> {
   }
 
@@ -244,7 +244,7 @@
    * The maximum length to which the pending connection queue of block transfer may grow.
    */
   @NamedParameter(doc = "The maximum number of pending connections to PartitionTransport server",
-      short_name = "block_backlog", default_value = "128")
+    short_name = "block_backlog", default_value = "128")
   public final class PartitionTransportServerBacklog implements Name<Integer> {
   }
 
@@ -252,7 +252,7 @@
    * The number of listening threads of block transfer server.
    */
   @NamedParameter(doc = "The number of listening threads of PartitionTransport server",
-      short_name = "block_threads_listening", default_value = "3")
+    short_name = "block_threads_listening", default_value = "3")
   public final class PartitionTransportServerNumListeningThreads implements Name<Integer> {
   }
 
@@ -261,7 +261,7 @@
    * which work on accepted connections.
    */
   @NamedParameter(doc = "The number of working threads of PartitionTransport server",
-      short_name = "block_threads_working", default_value = "10")
+    short_name = "block_threads_working", default_value = "10")
   public final class PartitionTransportServerNumWorkingThreads implements Name<Integer> {
   }
 
@@ -269,7 +269,7 @@
    * The number of threads of block transfer client.
    */
   @NamedParameter(doc = "The number of threads of PartitionTransport client",
-      short_name = "block_threads_client", default_value = "10")
+    short_name = "block_threads_client", default_value = "10")
   public final class PartitionTransportClientNumThreads implements Name<Integer> {
   }
 
@@ -288,9 +288,9 @@
   public static final OptionalParameter<String> GLUSTER_DISK_DIRECTORY = new OptionalParameter<>();
 
   public static final ConfigurationModule EXECUTOR_CONF = new JobConf()
-      .bindNamedParameter(ExecutorId.class, EXECUTOR_ID)
-      .bindNamedParameter(JobId.class, JOB_ID)
-      .bindNamedParameter(FileDirectory.class, LOCAL_DISK_DIRECTORY)
-      .bindNamedParameter(GlusterVolumeDirectory.class, GLUSTER_DISK_DIRECTORY)
-      .build();
+    .bindNamedParameter(ExecutorId.class, EXECUTOR_ID)
+    .bindNamedParameter(JobId.class, JOB_ID)
+    .bindNamedParameter(FileDirectory.class, LOCAL_DISK_DIRECTORY)
+    .bindNamedParameter(GlusterVolumeDirectory.class, GLUSTER_DISK_DIRECTORY)
+    .build();
 }
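
The JobConf hunks reformat REEF Tang `@NamedParameter` declarations, each pairing a doc string and a `short_name` with a `default_value`. A minimal sketch of resolving one of them at runtime, mirroring the `injector.getNamedInstance` call in the CompilerTestUtil hunk (the empty configuration is illustrative; with no explicit binding, Tang should fall back to the declared default of 0.3):

```java
import org.apache.nemo.conf.JobConf;
import org.apache.reef.tang.Injector;
import org.apache.reef.tang.Tang;

final class NamedParameterExample {
  static double readHeapSlack() throws Exception {
    // An empty configuration: JVMHeapSlack resolves to its default_value.
    final Injector injector = Tang.Factory.getTang()
      .newInjector(Tang.Factory.getTang().newConfigurationBuilder().build());
    return injector.getNamedInstance(JobConf.JVMHeapSlack.class);
  }
}
```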
diff --git a/examples/beam/pom.xml b/examples/beam/pom.xml
index 2748249..93b3388 100644
--- a/examples/beam/pom.xml
+++ b/examples/beam/pom.xml
@@ -17,103 +17,104 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-examples</artifactId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../</relativePath>
-    </parent>
+  <parent>
+    <groupId>org.apache.nemo</groupId>
+    <artifactId>nemo-examples</artifactId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../</relativePath>
+  </parent>
 
-    <artifactId>nemo-examples-beam</artifactId>
-    <name>Nemo Examples: Beam</name>
+  <artifactId>nemo-examples-beam</artifactId>
+  <name>Nemo Examples: Beam</name>
 
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-compiler-frontend-beam</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-client</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.github.fommil.netlib</groupId>
-            <artifactId>all</artifactId>
-            <version>${netlib.version}</version>
-            <type>pom</type>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.beam</groupId>
-            <artifactId>beam-sdks-java-io-hadoop-input-format</artifactId>
-            <version>${beam.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.beam</groupId>
-          <artifactId>beam-sdks-java-extensions-sql</artifactId>
-          <version>${beam.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-core</artifactId>
-            <version>${hadoop.version}</version>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <version>${hadoop.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-api</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-log4j12</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-    </dependencies>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-compiler-frontend-beam</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-client</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.github.fommil.netlib</groupId>
+      <artifactId>all</artifactId>
+      <version>${netlib.version}</version>
+      <type>pom</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.beam</groupId>
+      <artifactId>beam-sdks-java-io-hadoop-input-format</artifactId>
+      <version>${beam.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.beam</groupId>
+      <artifactId>beam-sdks-java-extensions-sql</artifactId>
+      <version>${beam.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+  </dependencies>
 
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-deploy-plugin</artifactId>
-                <version>2.8.2</version>
-                <configuration>
-                    <skip>true</skip>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-                <version>3.0.0</version>
-                <executions>
-                    <execution>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>shade</goal>
-                        </goals>
-                        <configuration>
-                            <outputFile>
-                                ${project.build.directory}/${project.artifactId}-${project.version}-shaded.jar
-                            </outputFile>
-                            <transformers>
-                                <!-- Required for using beam-hadoop: See https://stackoverflow.com/questions/44365545
-                                -->
-                                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer" />
-                            </transformers>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-deploy-plugin</artifactId>
+        <version>2.8.2</version>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>3.0.0</version>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <outputFile>
+                ${project.build.directory}/${project.artifactId}-${project.version}-shaded.jar
+              </outputFile>
+              <transformers>
+                <!-- Required for using beam-hadoop: See https://stackoverflow.com/questions/44365545
+                -->
+                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"/>
+              </transformers>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
 </project>
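
The `maven-shade-plugin` configuration kept above bundles the module into a single `*-shaded.jar`, and the `ManifestResourceTransformer` (see the Stack Overflow link in the comment) merges `META-INF/MANIFEST.MF` attributes so the shaded jar still carries a usable manifest for the Beam Hadoop input format. A minimal sketch for inspecting that merged manifest; the jar path is a placeholder, not from the diff, and should point at the `*-shaded.jar` built by the `package` phase:

```java
import java.util.jar.JarFile;
import java.util.jar.Manifest;

// Sketch: print the main attributes of a shaded jar's merged MANIFEST.MF.
// The path is hypothetical; substitute the *-shaded.jar produced above.
public final class ShadedManifestCheck {
  private ShadedManifestCheck() {
  }

  public static void main(final String[] args) throws Exception {
    try (JarFile jar = new JarFile("target/example-shaded.jar")) {
      final Manifest manifest = jar.getManifest();
      if (manifest != null) {
        manifest.getMainAttributes()
          .forEach((key, value) -> System.out.println(key + ": " + value));
      }
    }
  }
}
```
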
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/AlternatingLeastSquare.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/AlternatingLeastSquare.java
index 84dcc93..4ad3521 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/AlternatingLeastSquare.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/AlternatingLeastSquare.java
@@ -20,7 +20,6 @@
 
 import com.github.fommil.netlib.BLAS;
 import com.github.fommil.netlib.LAPACK;
-import org.apache.nemo.compiler.frontend.beam.transform.LoopCompositeTransform;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.coders.CoderProviders;
 import org.apache.beam.sdk.options.PipelineOptions;
@@ -29,6 +28,7 @@
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.commons.lang.ArrayUtils;
+import org.apache.nemo.compiler.frontend.beam.transform.LoopCompositeTransform;
 import org.netlib.util.intW;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -57,6 +57,7 @@
 
     /**
      * Constructor for Parseline DoFn class.
+     *
      * @param isUserData flag that distinguishes user data from item data.
      */
     public ParseLine(final boolean isUserData) {
@@ -65,6 +66,7 @@
 
     /**
      * ProcessElement method for BEAM.
+     *
      * @param c Process context.
      * @throws Exception Exception on the way.
      */
@@ -102,7 +104,7 @@
    * A DoFn that relays a single vector list.
    */
   public static final class UngroupSingleVectorList
-      extends DoFn<KV<Integer, Iterable<float[]>>, KV<Integer, float[]>> {
+    extends DoFn<KV<Integer, Iterable<float[]>>, KV<Integer, float[]>> {
 
     /**
      * ProcessElement method for BEAM.
@@ -129,7 +131,7 @@
    * Combiner for the training data.
    */
   public static final class TrainingDataCombiner
-      extends Combine.CombineFn<KV<int[], float[]>, List<KV<int[], float[]>>, KV<int[], float[]>> {
+    extends Combine.CombineFn<KV<int[], float[]>, List<KV<int[], float[]>>, KV<int[], float[]>> {
 
     @Override
     public List<KV<int[], float[]>> createAccumulator() {
@@ -263,7 +265,7 @@
    * The loop updates the user matrix and the item matrix in each iteration.
    */
   public static final class UpdateUserAndItemMatrix
-      extends LoopCompositeTransform<PCollection<KV<Integer, float[]>>, PCollection<KV<Integer, float[]>>> {
+    extends LoopCompositeTransform<PCollection<KV<Integer, float[]>>, PCollection<KV<Integer, float[]>>> {
     private final Integer numFeatures;
     private final double lambda;
     private final PCollection<KV<Integer, KV<int[], float[]>>> parsedUserData;
@@ -271,8 +273,9 @@
 
     /**
      * Constructor of UpdateUserAndItemMatrix CompositeTransform.
-     * @param numFeatures number of features.
-     * @param lambda lambda.
+     *
+     * @param numFeatures    number of features.
+     * @param lambda         lambda.
      * @param parsedUserData PCollection of parsed user data.
      * @param parsedItemData PCollection of parsed item data.
      */
@@ -289,16 +292,16 @@
     public PCollection<KV<Integer, float[]>> expand(final PCollection<KV<Integer, float[]>> itemMatrix) {
       // Make Item Matrix view.
       final PCollectionView<Map<Integer, float[]>> itemMatrixView =
-          itemMatrix.apply(GroupByKey.create()).apply(ParDo.of(new UngroupSingleVectorList())).apply(View.asMap());
+        itemMatrix.apply(GroupByKey.create()).apply(ParDo.of(new UngroupSingleVectorList())).apply(View.asMap());
 
       // Get new User Matrix
       final PCollectionView<Map<Integer, float[]>> userMatrixView = parsedUserData
-          .apply(ParDo.of(new CalculateNextMatrix(numFeatures, lambda, itemMatrixView)).withSideInputs(itemMatrixView))
-          .apply(GroupByKey.create()).apply(ParDo.of(new UngroupSingleVectorList())).apply(View.asMap());
+        .apply(ParDo.of(new CalculateNextMatrix(numFeatures, lambda, itemMatrixView)).withSideInputs(itemMatrixView))
+        .apply(GroupByKey.create()).apply(ParDo.of(new UngroupSingleVectorList())).apply(View.asMap());
 
       // return new Item Matrix
       return parsedItemData.apply(ParDo.of(new CalculateNextMatrix(numFeatures, lambda, userMatrixView))
-          .withSideInputs(userMatrixView));
+        .withSideInputs(userMatrixView));
     }
   }
 
@@ -310,7 +313,7 @@
     private final boolean isDeterministic;
 
     /**
-     * @param numFeatures number of the features.
+     * @param numFeatures     number of the features.
      * @param isDeterministic whether or not to initialize the matrix in deterministic mode.
      */
     CreateInitialMatrix(final int numFeatures,
@@ -349,6 +352,7 @@
 
   /**
    * Main function for the ALS BEAM program.
+   *
    * @param args arguments.
    */
   public static void main(final String[] args) throws ClassNotFoundException {
@@ -385,17 +389,17 @@
 
     // Parse data for item
     final PCollection<KV<Integer, KV<int[], float[]>>> parsedItemData = rawData
-        .apply(ParDo.of(new ParseLine(false)))
-        .apply(Combine.perKey(new TrainingDataCombiner()));
+      .apply(ParDo.of(new ParseLine(false)))
+      .apply(Combine.perKey(new TrainingDataCombiner()));
 
     // Parse data for user
     final PCollection<KV<Integer, KV<int[], float[]>>> parsedUserData = rawData
-        .apply(ParDo.of(new ParseLine(true)))
-        .apply(Combine.perKey(new TrainingDataCombiner()));
+      .apply(ParDo.of(new ParseLine(true)))
+      .apply(Combine.perKey(new TrainingDataCombiner()));
 
     // Create Initial Item Matrix
     PCollection<KV<Integer, float[]>> itemMatrix =
-        parsedItemData.apply(ParDo.of(new CreateInitialMatrix(numFeatures, checkOutput)));
+      parsedItemData.apply(ParDo.of(new CreateInitialMatrix(numFeatures, checkOutput)));
 
     // Iterations to update Item Matrix.
     for (int i = 0; i < numItr; i++) {
@@ -405,15 +409,15 @@
 
     if (checkOutput) {
       final PCollection<String> result = itemMatrix.apply(MapElements.<KV<Integer, float[]>, String>via(
-          new SimpleFunction<KV<Integer, float[]>, String>() {
-            @Override
-            public String apply(final KV<Integer, float[]> elem) {
-              final List<String> values = Stream.of(ArrayUtils.toObject(elem.getValue()))
-                  .map(String::valueOf)
-                  .collect(Collectors.toList());
-              return elem.getKey() + "," + String.join(",", values);
-            }
-          }));
+        new SimpleFunction<KV<Integer, float[]>, String>() {
+          @Override
+          public String apply(final KV<Integer, float[]> elem) {
+            final List<String> values = Stream.of(ArrayUtils.toObject(elem.getValue()))
+              .map(String::valueOf)
+              .collect(Collectors.toList());
+            return elem.getKey() + "," + String.join(",", values);
+          }
+        }));
 
       GenericSourceSink.write(result, outputFilePath);
     }
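
The hunks above are representative of the whole commit: chained `.apply(...)` calls, `extends` clauses, and wrapped arguments now get a single 2-space continuation indent instead of the previous 4-space alignment. A self-contained sketch of the convention (the names are illustrative, not taken from the diff):

```java
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// Continuation lines are indented by one extra 2-space level,
// matching the reformatted pipeline chains above.
public final class IndentConvention {
  private IndentConvention() {
  }

  public static void main(final String[] args) {
    final List<String> stages = Stream.of("read", "parse", "combine")
      .map(String::toUpperCase)
      .collect(Collectors.toList());
    System.out.println(stages);
  }
}
```
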
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/AlternatingLeastSquareInefficient.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/AlternatingLeastSquareInefficient.java
index 06588e6..5d9a611 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/AlternatingLeastSquareInefficient.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/AlternatingLeastSquareInefficient.java
@@ -18,7 +18,6 @@
  */
 package org.apache.nemo.examples.beam;
 
-import org.apache.nemo.compiler.frontend.beam.transform.LoopCompositeTransform;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.coders.CoderProviders;
 import org.apache.beam.sdk.options.PipelineOptions;
@@ -29,11 +28,13 @@
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionView;
-
-import java.util.*;
+import org.apache.nemo.compiler.frontend.beam.transform.LoopCompositeTransform;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.Arrays;
+import java.util.Map;
+
 /**
  * Sample Alternating Least Square application.
  * This application have been made separately, to demonstrate the LoopInvariantCodeMotion optimization pass.
@@ -53,7 +54,7 @@
    * The loop updates the user matrix and the item matrix in each iteration.
    */
   public static final class UpdateUserAndItemMatrix extends LoopCompositeTransform<
-      PCollection<KV<Integer, float[]>>, PCollection<KV<Integer, float[]>>> {
+    PCollection<KV<Integer, float[]>>, PCollection<KV<Integer, float[]>>> {
     private final Integer numFeatures;
     private final Double lambda;
     private final PCollection<String> rawData;
@@ -61,9 +62,10 @@
 
     /**
      * Constructor of UpdateUserAndItemMatrix CompositeTransform.
-     * @param numFeatures number of features.
-     * @param lambda lambda.
-     * @param rawData PCollection of raw data.
+     *
+     * @param numFeatures    number of features.
+     * @param lambda         lambda.
+     * @param rawData        PCollection of raw data.
      * @param parsedItemData PCollection of parsed item data.
      */
     UpdateUserAndItemMatrix(final Integer numFeatures, final Double lambda,
@@ -79,25 +81,26 @@
     public PCollection<KV<Integer, float[]>> expand(final PCollection<KV<Integer, float[]>> itemMatrix) {
       // Parse data for user
       final PCollection<KV<Integer, KV<int[], float[]>>> parsedUserData = rawData
-          .apply(ParDo.of(new AlternatingLeastSquare.ParseLine(true)))
-          .apply(Combine.perKey(new AlternatingLeastSquare.TrainingDataCombiner()));
+        .apply(ParDo.of(new AlternatingLeastSquare.ParseLine(true)))
+        .apply(Combine.perKey(new AlternatingLeastSquare.TrainingDataCombiner()));
 
       // Make Item Matrix view.
       final PCollectionView<Map<Integer, float[]>> itemMatrixView = itemMatrix.apply(View.asMap());
       // Get new User Matrix
       final PCollectionView<Map<Integer, float[]>> userMatrixView = parsedUserData
-          .apply(ParDo.of(new AlternatingLeastSquare.CalculateNextMatrix(numFeatures, lambda, itemMatrixView))
-              .withSideInputs(itemMatrixView))
-          .apply(View.asMap());
+        .apply(ParDo.of(new AlternatingLeastSquare.CalculateNextMatrix(numFeatures, lambda, itemMatrixView))
+          .withSideInputs(itemMatrixView))
+        .apply(View.asMap());
       // return new Item Matrix
       return parsedItemData.apply(
-          ParDo.of(new AlternatingLeastSquare.CalculateNextMatrix(numFeatures, lambda, userMatrixView))
+        ParDo.of(new AlternatingLeastSquare.CalculateNextMatrix(numFeatures, lambda, userMatrixView))
           .withSideInputs(userMatrixView));
     }
   }
 
   /**
    * Main function for the ALS BEAM program.
+   *
    * @param args arguments.
    */
   public static void main(final String[] args) throws ClassNotFoundException {
@@ -126,29 +129,29 @@
 
     // Parse data for item
     final PCollection<KV<Integer, KV<int[], float[]>>> parsedItemData = rawData
-        .apply(ParDo.of(new AlternatingLeastSquare.ParseLine(false)))
-        .apply(Combine.perKey(new AlternatingLeastSquare.TrainingDataCombiner()));
+      .apply(ParDo.of(new AlternatingLeastSquare.ParseLine(false)))
+      .apply(Combine.perKey(new AlternatingLeastSquare.TrainingDataCombiner()));
 
     // Create Initial Item Matrix
     PCollection<KV<Integer, float[]>> itemMatrix = parsedItemData
-        .apply(ParDo.of(new DoFn<KV<Integer, KV<int[], float[]>>, KV<Integer, float[]>>() {
-          @ProcessElement
-          public void processElement(final ProcessContext c) throws Exception {
-            final float[] result = new float[numFeatures];
+      .apply(ParDo.of(new DoFn<KV<Integer, KV<int[], float[]>>, KV<Integer, float[]>>() {
+        @ProcessElement
+        public void processElement(final ProcessContext c) throws Exception {
+          final float[] result = new float[numFeatures];
 
-            final KV<Integer, KV<int[], float[]>> element = c.element();
-            final float[] ratings = element.getValue().getValue();
-            for (int i = 0; i < ratings.length; i++) {
-              result[0] += ratings[i];
-            }
-
-            result[0] /= ratings.length;
-            for (int i = 1; i < result.length; i++) {
-              result[i] = (float) (Math.random() * 0.01);
-            }
-            c.output(KV.of(element.getKey(), result));
+          final KV<Integer, KV<int[], float[]>> element = c.element();
+          final float[] ratings = element.getValue().getValue();
+          for (int i = 0; i < ratings.length; i++) {
+            result[0] += ratings[i];
           }
-        }));
+
+          result[0] /= ratings.length;
+          for (int i = 1; i < result.length; i++) {
+            result[i] = (float) (Math.random() * 0.01);
+          }
+          c.output(KV.of(element.getKey(), result));
+        }
+      }));
 
     // Iterations to update Item Matrix.
     for (Integer i = 0; i < numItr; i++) {
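
Besides the indentation, this file shows the commit's import policy: the wildcard `java.util.*` is expanded to the classes actually used (`Arrays`, `Map`), and imports are regrouped with third-party packages in alphabetical order, then a blank line, then the `java.*` group. A compile-checkable sketch of the layout, with illustrative class names:

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.List;

// Layout sketch: alphabetical third-party imports, a blank line, then java.*.
public final class ImportLayoutExample {
  private static final Logger LOG = LoggerFactory.getLogger(ImportLayoutExample.class);

  private ImportLayoutExample() {
  }

  public static void main(final String[] args) {
    final List<String> groups = Arrays.asList("third-party", "java.*");
    LOG.info("import groups: {}", groups);
  }
}
```
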
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/Broadcast.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/Broadcast.java
index 522eee2..ad7f8e2 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/Broadcast.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/Broadcast.java
@@ -41,6 +41,7 @@
 
   /**
    * Main function for the BEAM program.
+   *
    * @param args arguments.
    */
   public static void main(final String[] args) {
@@ -53,19 +54,19 @@
     final PCollectionView<Iterable<String>> allCollection = elemCollection.apply(View.<String>asIterable());
 
     final PCollection<String> result = elemCollection.apply(ParDo.of(new DoFn<String, String>() {
-          @ProcessElement
-          public void processElement(final ProcessContext c) {
-            final String line = c.element();
-            final Iterable<String> all = c.sideInput(allCollection);
-            final Optional<String> appended = StreamSupport.stream(all.spliterator(), false)
-                .reduce((l, r) -> l + '\n' + r);
-            if (appended.isPresent()) {
-              c.output("line: " + line + "\n" + appended.get());
-            } else {
-              c.output("error");
-            }
+        @ProcessElement
+        public void processElement(final ProcessContext c) {
+          final String line = c.element();
+          final Iterable<String> all = c.sideInput(allCollection);
+          final Optional<String> appended = StreamSupport.stream(all.spliterator(), false)
+            .reduce((l, r) -> l + '\n' + r);
+          if (appended.isPresent()) {
+            c.output("line: " + line + "\n" + appended.get());
+          } else {
+            c.output("error");
           }
-        }).withSideInputs(allCollection)
+        }
+      }).withSideInputs(allCollection)
     );
 
     GenericSourceSink.write(result, outputFilePath);
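
The reformatted block above is the standard Beam side-input pattern: materialize a `PCollection` as a `PCollectionView`, then read it inside a `DoFn` via `c.sideInput(...)`. A stripped-down, runnable sketch of the same pattern, assuming the Beam SDK and a runner (e.g. the direct runner) on the classpath:

```java
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.View;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;

public final class SideInputSketch {
  private SideInputSketch() {
  }

  public static void main(final String[] args) {
    final Pipeline p = Pipeline.create(PipelineOptionsFactory.create());
    final PCollection<String> elems = p.apply(Create.of("a", "b", "c"));
    // Materialize the whole collection as a broadcast side input.
    final PCollectionView<Iterable<String>> view = elems.apply(View.asIterable());
    elems.apply(ParDo.of(new DoFn<String, String>() {
      @ProcessElement
      public void processElement(final ProcessContext c) {
        int count = 0;
        for (final String ignored : c.sideInput(view)) {
          count++;
        }
        c.output(c.element() + " (broadcast size: " + count + ")");
      }
    }).withSideInputs(view));
    p.run().waitUntilFinish();
  }
}
```
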
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/GenericSourceSink.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/GenericSourceSink.java
index b9f3a15..7aff8da 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/GenericSourceSink.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/GenericSourceSink.java
@@ -57,9 +57,10 @@
 
   /**
    * Read data.
-   * @param pipeline  beam pipeline
-   * @param path      path to read
-   * @return          returns the read value
+   *
+   * @param pipeline beam pipeline
+   * @param path     path to read
+   * @return returns the read value
    */
   public static PCollection<String> read(final Pipeline pipeline,
                                          final String path) {
@@ -72,19 +73,19 @@
 
       // Without translations, Beam internally does some weird cloning
       final HadoopFormatIO.Read<Long, String> read = HadoopFormatIO.<Long, String>read()
-          .withConfiguration(hadoopConf)
-          .withKeyTranslation(new SimpleFunction<LongWritable, Long>() {
-            @Override
-            public Long apply(final LongWritable longWritable) {
-              return longWritable.get();
-            }
-          })
-          .withValueTranslation(new SimpleFunction<Text, String>() {
-            @Override
-            public String apply(final Text text) {
-              return text.toString();
-            }
-          });
+        .withConfiguration(hadoopConf)
+        .withKeyTranslation(new SimpleFunction<LongWritable, Long>() {
+          @Override
+          public Long apply(final LongWritable longWritable) {
+            return longWritable.get();
+          }
+        })
+        .withValueTranslation(new SimpleFunction<Text, String>() {
+          @Override
+          public String apply(final Text text) {
+            return text.toString();
+          }
+        });
       return pipeline.apply(read).apply(MapElements.into(TypeDescriptor.of(String.class)).via(KV::getValue));
     } else {
       return pipeline.apply(TextIO.read().from(path));
@@ -94,9 +95,10 @@
   /**
    * Write data.
    * NEMO-365: This method could later be replaced using the HadoopFormatIO class.
+   *
    * @param dataToWrite data to write
    * @param path        path to write data
-   * @return            returns {@link PDone}
+   * @return returns {@link PDone}
    */
   public static PDone write(final PCollection<String> dataToWrite,
                             final String path) {
@@ -110,8 +112,9 @@
 
   /**
    * Check if given path is HDFS path.
+   *
    * @param path path to check
-   * @return     boolean value indicating whether the path is HDFS path or not
+   * @return boolean value indicating whether the path is HDFS path or not
    */
   private static boolean isHDFSPath(final String path) {
     return path.startsWith("hdfs://") || path.startsWith("s3a://") || path.startsWith("file://");
@@ -132,7 +135,7 @@
   /**
    * Constructor.
    *
-   * @param path    HDFS path
+   * @param path HDFS path
    */
   HDFSWrite(final String path) {
     this.path = path;
@@ -158,7 +161,8 @@
 
   /**
    * process element.
-   * @param c          context {@link ProcessContext}
+   *
+   * @param c context {@link ProcessContext}
    * @throws Exception exception.
    */
   @ProcessElement
@@ -174,7 +178,8 @@
 
   /**
    * Teardown.
-   * @throws IOException  output stream exception
+   *
+   * @throws IOException output stream exception
    */
   @Teardown
   public void tearDown() throws IOException {
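
The javadoc edits in this file all apply one rule used across the commit: a blank `*` line separates the description from the tag block, and `@param` descriptions are column-aligned. A sketch of the convention on a hypothetical interface:

```java
import java.io.IOException;

interface DataMover {
  /**
   * Copies data from one path to another.
   *
   * @param source      path to read from
   * @param destination path to write to
   * @throws IOException if either path is inaccessible
   */
  void copy(String source, String destination) throws IOException;
}
```
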
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/MinimalWordCount.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/MinimalWordCount.java
index 2b595f6..4ac1085 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/MinimalWordCount.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/MinimalWordCount.java
@@ -17,6 +17,7 @@
  * under the License.
  */
 package org.apache.nemo.examples.beam;
+
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.TextIO;
 import org.apache.beam.sdk.options.PipelineOptions;
@@ -28,6 +29,7 @@
 import org.apache.beam.sdk.values.TypeDescriptors;
 
 import java.util.Arrays;
+
 /**
  * MinimalWordCount program from BEAM.
  */
@@ -37,8 +39,10 @@
    */
   private MinimalWordCount() {
   }
+
   /**
    * Main function for the MinimalWordCount Beam program.
+   *
    * @param args arguments.
    */
   public static void main(final String[] args) {
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/MultinomialLogisticRegression.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/MultinomialLogisticRegression.java
index 7a671fa..ebae035 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/MultinomialLogisticRegression.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/MultinomialLogisticRegression.java
@@ -18,8 +18,6 @@
  */
 package org.apache.nemo.examples.beam;
 
-import org.apache.nemo.compiler.frontend.beam.transform.LoopCompositeTransform;
-import org.apache.nemo.common.Pair;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.*;
@@ -30,11 +28,13 @@
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
-
-import java.util.*;
+import org.apache.nemo.common.Pair;
+import org.apache.nemo.compiler.frontend.beam.transform.LoopCompositeTransform;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.*;
+
 /**
  * Sample Multinomial Logistic Regression application.
  */
@@ -62,8 +62,9 @@
 
     /**
      * Constructor for CalculateGradient DoFn class.
-     * @param modelView PCollectionView of the model.
-     * @param numClasses number of classes.
+     *
+     * @param modelView   PCollectionView of the model.
+     * @param numClasses  number of classes.
      * @param numFeatures number of features.
      */
     CalculateGradient(final PCollectionView<Map<Integer, List<Double>>> modelView,
@@ -94,6 +95,7 @@
 
     /**
      * Method for parsing lines of inputs.
+     *
      * @param input input line.
      * @return the parsed key-value pair.
      */
@@ -118,6 +120,7 @@
 
     /**
      * ProcessElement method for BEAM.
+     *
      * @param c Process context.
      * @throws Exception Exception on the way.
      */
@@ -252,11 +255,12 @@
 
     /**
      * Constructor for ApplyGradient DoFn class.
-     * @param numFeatures number of features.
-     * @param numClasses number of classes.
+     *
+     * @param numFeatures  number of features.
+     * @param numClasses   number of classes.
      * @param iterationNum number of iteration.
-     * @param gradientTag TupleTag of gradient.
-     * @param modelTag TupleTag of model.
+     * @param gradientTag  TupleTag of gradient.
+     * @param modelTag     TupleTag of model.
      */
     ApplyGradient(final Integer numFeatures, final Integer numClasses, final Integer iterationNum,
                   final TupleTag<List<Double>> gradientTag, final TupleTag<List<Double>> modelTag) {
@@ -269,6 +273,7 @@
 
     /**
      * ProcessElement method for BEAM.
+     *
      * @param c Process context.
      * @throws Exception Exception on the way.
      */
@@ -326,7 +331,7 @@
    * Combine Function for Iterable of gradients.
    */
   public static final class CombineFunctionForIterable
-      implements SerializableFunction<Iterable<List<Double>>, List<Double>> {
+    implements SerializableFunction<Iterable<List<Double>>, List<Double>> {
     @Override
     public List<Double> apply(final Iterable<List<Double>> gradients) {
       List<Double> ret = null;
@@ -348,11 +353,11 @@
   }
 
   /**
-   + Composite transform that wraps the transforms inside the loop.
-   + The loop updates the model in each iteration.
+   * + Composite transform that wraps the transforms inside the loop.
+   * + The loop updates the model in each iteration.
    */
   public static final class UpdateModel
-      extends LoopCompositeTransform<PCollection<KV<Integer, List<Double>>>, PCollection<KV<Integer, List<Double>>>> {
+    extends LoopCompositeTransform<PCollection<KV<Integer, List<Double>>>, PCollection<KV<Integer, List<Double>>>> {
     private final Integer numFeatures;
     private final Integer numClasses;
     private final Integer iterationNum;
@@ -360,10 +365,11 @@
 
     /**
      * Constructor of UpdateModel CompositeTransform.
-     * @param numFeatures number of features.
-     * @param numClasses number of classes.
+     *
+     * @param numFeatures  number of features.
+     * @param numClasses   number of classes.
      * @param iterationNum iteration number.
-     * @param readInput PCollection of
+     * @param readInput    PCollection of
      */
     UpdateModel(final Integer numFeatures, final Integer numClasses, final Integer iterationNum,
                 final PCollection<String> readInput) {
@@ -380,28 +386,29 @@
 
       // Find gradient.
       final PCollection<KV<Integer, List<Double>>> gradient = readInput
-          .apply(ParDo.of(
-              new CalculateGradient(modelView, numClasses, numFeatures)).withSideInputs(modelView))
-          .apply(Combine.perKey(new CombineFunction()));
+        .apply(ParDo.of(
+          new CalculateGradient(modelView, numClasses, numFeatures)).withSideInputs(modelView))
+        .apply(Combine.perKey(new CombineFunction()));
 
       // Tags for CoGroupByKey.
       final TupleTag<List<Double>> gradientTag = new TupleTag<>();
       final TupleTag<List<Double>> modelTag = new TupleTag<>();
       final KeyedPCollectionTuple<Integer> coGbkInput = KeyedPCollectionTuple
-          .of(gradientTag, gradient)
-          .and(modelTag, model);
+        .of(gradientTag, gradient)
+        .and(modelTag, model);
 
       final PCollection<KV<Integer, CoGbkResult>> groupResult =
-          coGbkInput.apply(CoGroupByKey.create());
+        coGbkInput.apply(CoGroupByKey.create());
 
       // Update the model
       return groupResult
-          .apply(ParDo.of(new ApplyGradient(numFeatures, numClasses, iterationNum, gradientTag, modelTag)));
+        .apply(ParDo.of(new ApplyGradient(numFeatures, numClasses, iterationNum, gradientTag, modelTag)));
     }
   }
 
   /**
    * Main function for the MLR BEAM program.
+   *
    * @param args arguments.
    */
   public static void main(final String[] args) {
@@ -425,23 +432,23 @@
 
     // Initialization of the model for Logistic Regression.
     PCollection<KV<Integer, List<Double>>> model = p
-        .apply(Create.of(initialModelKeys))
-        .apply(ParDo.of(new DoFn<Integer, KV<Integer, List<Double>>>() {
-          @ProcessElement
-          public void processElement(final ProcessContext c) throws Exception {
-            if (c.element() == numClasses - 1) {
-              final List<Double> model = new ArrayList<>(1);
-              model.add(0.0);
-              c.output(KV.of(c.element(), model));
-            } else {
-              final List<Double> model = new ArrayList<>(numFeatures);
-              for (Integer i = 0; i < numFeatures; i++) {
-                model.add(i, 0.0);
-              }
-              c.output(KV.of(c.element(), model));
+      .apply(Create.of(initialModelKeys))
+      .apply(ParDo.of(new DoFn<Integer, KV<Integer, List<Double>>>() {
+        @ProcessElement
+        public void processElement(final ProcessContext c) throws Exception {
+          if (c.element() == numClasses - 1) {
+            final List<Double> model = new ArrayList<>(1);
+            model.add(0.0);
+            c.output(KV.of(c.element(), model));
+          } else {
+            final List<Double> model = new ArrayList<>(numFeatures);
+            for (Integer i = 0; i < numFeatures; i++) {
+              model.add(i, 0.0);
             }
+            c.output(KV.of(c.element(), model));
           }
-        }));
+        }
+      }));
 
     // Read input data
     final PCollection<String> readInput = GenericSourceSink.read(p, inputFilePath);
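
The `UpdateModel` transform reformatted above joins the gradient and the current model with `CoGroupByKey`, keyed by class index and distinguished by `TupleTag`s. A minimal, runnable sketch of that join pattern, under the same classpath assumptions as before and with illustrative data:

```java
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.join.CoGbkResult;
import org.apache.beam.sdk.transforms.join.CoGroupByKey;
import org.apache.beam.sdk.transforms.join.KeyedPCollectionTuple;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.TupleTag;

public final class CoGbkSketch {
  private CoGbkSketch() {
  }

  public static void main(final String[] args) {
    final Pipeline p = Pipeline.create(PipelineOptionsFactory.create());
    final PCollection<KV<Integer, String>> gradients = p.apply("gradients", Create.of(KV.of(0, "g0")));
    final PCollection<KV<Integer, String>> models = p.apply("models", Create.of(KV.of(0, "m0")));
    final TupleTag<String> gradientTag = new TupleTag<>();
    final TupleTag<String> modelTag = new TupleTag<>();
    // Join both collections on the Integer key; each side stays addressable by its tag.
    final PCollection<KV<Integer, CoGbkResult>> joined = KeyedPCollectionTuple
      .of(gradientTag, gradients)
      .and(modelTag, models)
      .apply(CoGroupByKey.create());
    // Downstream DoFns read each side via result.getAll(gradientTag) / getAll(modelTag).
    p.run().waitUntilFinish();
  }
}
```
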
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/NemoPipelineOptionsFactory.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/NemoPipelineOptionsFactory.java
index 000d238..43e5e40 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/NemoPipelineOptionsFactory.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/NemoPipelineOptionsFactory.java
@@ -33,6 +33,7 @@
 
   /**
    * Create a PipelineOptions for nemo runner.
+   *
    * @return pipeline options
    */
   public static PipelineOptions create() {
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/NetworkTraceAnalysis.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/NetworkTraceAnalysis.java
index e2a829d..f4a9ff3 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/NetworkTraceAnalysis.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/NetworkTraceAnalysis.java
@@ -20,7 +20,10 @@
 
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.*;
+import org.apache.beam.sdk.transforms.Filter;
+import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.SimpleFunction;
 import org.apache.beam.sdk.transforms.join.CoGbkResult;
 import org.apache.beam.sdk.transforms.join.CoGroupByKey;
 import org.apache.beam.sdk.transforms.join.KeyedPCollectionTuple;
@@ -49,6 +52,7 @@
 
   /**
    * Main function for the Beam program.
+   *
    * @param args arguments.
    */
   public static void main(final String[] args) {
@@ -68,7 +72,7 @@
       }
     };
     final SimpleFunction<KV<String, Iterable<KV<String, Long>>>, KV<String, Long>> mapToStdev
-        = new SimpleFunction<KV<String, Iterable<KV<String, Long>>>, KV<String, Long>>() {
+      = new SimpleFunction<KV<String, Iterable<KV<String, Long>>>, KV<String, Long>>() {
       @Override
       public KV<String, Long> apply(final KV<String, Iterable<KV<String, Long>>> kv) {
         return KV.of(kv.getKey(), stdev(kv.getValue()));
@@ -77,44 +81,44 @@
 
     final Pipeline p = Pipeline.create(options);
     final PCollection<KV<String, Long>> in0 = GenericSourceSink.read(p, input0FilePath)
-        .apply(Filter.by(filter))
-        .apply(MapElements.via(new SimpleFunction<String, KV<String, KV<String, Long>>>() {
-          @Override
-          public KV<String, KV<String, Long>> apply(final String line) {
-            final Matcher matcher = pattern.matcher(line);
-            matcher.find();
-            return KV.of(matcher.group(2), KV.of(matcher.group(1), Long.valueOf(matcher.group(3))));
-          }
-        }))
-        .apply(GroupByKey.create())
-        .apply(MapElements.via(mapToStdev));
+      .apply(Filter.by(filter))
+      .apply(MapElements.via(new SimpleFunction<String, KV<String, KV<String, Long>>>() {
+        @Override
+        public KV<String, KV<String, Long>> apply(final String line) {
+          final Matcher matcher = pattern.matcher(line);
+          matcher.find();
+          return KV.of(matcher.group(2), KV.of(matcher.group(1), Long.valueOf(matcher.group(3))));
+        }
+      }))
+      .apply(GroupByKey.create())
+      .apply(MapElements.via(mapToStdev));
     final PCollection<KV<String, Long>> in1 = GenericSourceSink.read(p, input1FilePath)
-        .apply(Filter.by(filter))
-        .apply(MapElements.via(new SimpleFunction<String, KV<String, KV<String, Long>>>() {
-          @Override
-          public KV<String, KV<String, Long>> apply(final String line) {
-            final Matcher matcher = pattern.matcher(line);
-            matcher.find();
-            return KV.of(matcher.group(1), KV.of(matcher.group(2), Long.valueOf(matcher.group(3))));
-          }
-        }))
-        .apply(GroupByKey.create())
-        .apply(MapElements.via(mapToStdev));
+      .apply(Filter.by(filter))
+      .apply(MapElements.via(new SimpleFunction<String, KV<String, KV<String, Long>>>() {
+        @Override
+        public KV<String, KV<String, Long>> apply(final String line) {
+          final Matcher matcher = pattern.matcher(line);
+          matcher.find();
+          return KV.of(matcher.group(1), KV.of(matcher.group(2), Long.valueOf(matcher.group(3))));
+        }
+      }))
+      .apply(GroupByKey.create())
+      .apply(MapElements.via(mapToStdev));
     final TupleTag<Long> tag0 = new TupleTag<>();
     final TupleTag<Long> tag1 = new TupleTag<>();
     final PCollection<KV<String, CoGbkResult>> joined =
-        KeyedPCollectionTuple.of(tag0, in0).and(tag1, in1).apply(CoGroupByKey.create());
+      KeyedPCollectionTuple.of(tag0, in0).and(tag1, in1).apply(CoGroupByKey.create());
     final PCollection<String> result = joined
-        .apply(MapElements.via(new SimpleFunction<KV<String, CoGbkResult>, String>() {
-          @Override
-          public String apply(final KV<String, CoGbkResult> kv) {
-            final long source = getLong(kv.getValue().getAll(tag0));
-            final long destination = getLong(kv.getValue().getAll(tag1));
-            final String intermediate = kv.getKey();
-            return new StringBuilder(intermediate).append(",").append(source).append(",")
-                .append(destination).toString();
-          }
-        }));
+      .apply(MapElements.via(new SimpleFunction<KV<String, CoGbkResult>, String>() {
+        @Override
+        public String apply(final KV<String, CoGbkResult> kv) {
+          final long source = getLong(kv.getValue().getAll(tag0));
+          final long destination = getLong(kv.getValue().getAll(tag1));
+          final String intermediate = kv.getKey();
+          return new StringBuilder(intermediate).append(",").append(source).append(",")
+            .append(destination).toString();
+        }
+      }));
     GenericSourceSink.write(result, outputFilePath);
     p.run();
   }
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/PartitionWordsByLength.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/PartitionWordsByLength.java
index c4bf5d8..7845b18 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/PartitionWordsByLength.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/PartitionWordsByLength.java
@@ -61,33 +61,33 @@
     final PCollection<String> lines = GenericSourceSink.read(p, inputFilePath);
 
     PCollectionTuple results = lines
-        .apply(FlatMapElements
-            .into(TypeDescriptors.strings())
-            .via(line -> Arrays.asList(line.split(" "))))
-        .apply(ParDo.of(new DoFn<String, String>() {
-          // processElement with Beam OutputReceiver.
-          @ProcessElement
-          public void processElement(final ProcessContext c) {
-            final String word = c.element();
-            if (word.length() < 6) {
-              c.output(shortWordsTag, KV.of(word.length(), word));
-            } else if (word.length() < 11) {
-              c.output(longWordsTag, KV.of(word.length(), word));
-            } else if (word.length() > 12) {
-              c.output(veryVeryLongWordsTag, word);
-            } else {
-              c.output(word);
-            }
+      .apply(FlatMapElements
+        .into(TypeDescriptors.strings())
+        .via(line -> Arrays.asList(line.split(" "))))
+      .apply(ParDo.of(new DoFn<String, String>() {
+        // processElement with Beam OutputReceiver.
+        @ProcessElement
+        public void processElement(final ProcessContext c) {
+          final String word = c.element();
+          if (word.length() < 6) {
+            c.output(shortWordsTag, KV.of(word.length(), word));
+          } else if (word.length() < 11) {
+            c.output(longWordsTag, KV.of(word.length(), word));
+          } else if (word.length() > 12) {
+            c.output(veryVeryLongWordsTag, word);
+          } else {
+            c.output(word);
           }
-        }).withOutputTags(veryLongWordsTag, TupleTagList
-            .of(shortWordsTag).and(longWordsTag).and(veryVeryLongWordsTag)));
+        }
+      }).withOutputTags(veryLongWordsTag, TupleTagList
+        .of(shortWordsTag).and(longWordsTag).and(veryVeryLongWordsTag)));
 
     PCollection<String> shortWords = results.get(shortWordsTag)
-        .apply(GroupByKey.create())
-        .apply(MapElements.via(new FormatLines()));
+      .apply(GroupByKey.create())
+      .apply(MapElements.via(new FormatLines()));
     PCollection<String> longWords = results.get(longWordsTag)
-        .apply(GroupByKey.create())
-        .apply(MapElements.via(new FormatLines()));
+      .apply(GroupByKey.create())
+      .apply(MapElements.via(new FormatLines()));
     PCollection<String> veryLongWords = results.get(veryLongWordsTag);
     PCollection<String> veryVeryLongWords = results.get(veryVeryLongWordsTag);
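
One behavioral detail worth seeing in the hunk above: the untagged `c.output(word)` goes to the main output, which is the first argument of `withOutputTags` (`veryLongWordsTag`), while the tagged calls route to the additional outputs. A trimmed, runnable sketch of declaring and retrieving the tags, with illustrative tag names:

```java
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.sdk.values.TupleTagList;

public final class MultiOutputSketch {
  private MultiOutputSketch() {
  }

  public static void main(final String[] args) {
    final TupleTag<String> longTag = new TupleTag<String>() {
    };
    final TupleTag<String> shortTag = new TupleTag<String>() {
    };
    final Pipeline p = Pipeline.create(PipelineOptionsFactory.create());
    final PCollectionTuple results = p
      .apply(Create.of("word", "longerword"))
      .apply(ParDo.of(new DoFn<String, String>() {
        @ProcessElement
        public void processElement(final ProcessContext c) {
          if (c.element().length() < 6) {
            c.output(shortTag, c.element()); // additional output
          } else {
            c.output(c.element()); // main output, i.e. longTag
          }
        }
      }).withOutputTags(longTag, TupleTagList.of(shortTag)));
    final PCollection<String> longWords = results.get(longTag);
    final PCollection<String> shortWords = results.get(shortTag);
    p.run().waitUntilFinish();
  }
}
```
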
 
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/PerKeyMedian.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/PerKeyMedian.java
index 268f4bc..5161bcd 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/PerKeyMedian.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/PerKeyMedian.java
@@ -18,17 +18,19 @@
  */
 package org.apache.nemo.examples.beam;
 
+import com.google.common.collect.Lists;
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.*;
+import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.SimpleFunction;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
-import com.google.common.collect.Lists;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Collections;
 import java.util.List;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * IP stat analysis example used in skew experiment.
@@ -44,6 +46,7 @@
 
   /**
    * Main function for the MR BEAM program.
+   *
    * @param args arguments.
    */
   public static void main(final String[] args) {
@@ -57,25 +60,25 @@
     long start = System.currentTimeMillis();
 
     final PCollection<String> result = GenericSourceSink.read(p, inputFilePath)
-        .apply(MapElements.via(new SimpleFunction<String, KV<String, String>>() {
-          @Override
-          public KV<String, String> apply(final String line) {
-            final String[] words = line.split(" ");
-            String key = words[0];
-            String value = words[1];
-            return KV.of(key, value);
-          }
-        }))
-        .apply(GroupByKey.create())
-        .apply(MapElements.via(new SimpleFunction<KV<String, Iterable<String>>, String>() {
-          @Override
-          public String apply(final KV<String, Iterable<String>> kv) {
-            final String key = kv.getKey();
-            List value = Lists.newArrayList(kv.getValue());
-            Collections.sort(value);
-            return key + ", " + value.get(value.size() / 2);
-          }
-        }));
+      .apply(MapElements.via(new SimpleFunction<String, KV<String, String>>() {
+        @Override
+        public KV<String, String> apply(final String line) {
+          final String[] words = line.split(" ");
+          String key = words[0];
+          String value = words[1];
+          return KV.of(key, value);
+        }
+      }))
+      .apply(GroupByKey.create())
+      .apply(MapElements.via(new SimpleFunction<KV<String, Iterable<String>>, String>() {
+        @Override
+        public String apply(final KV<String, Iterable<String>> kv) {
+          final String key = kv.getKey();
+          List value = Lists.newArrayList(kv.getValue());
+          Collections.sort(value);
+          return key + ", " + value.get(value.size() / 2);
+        }
+      }));
     GenericSourceSink.write(result, outputFilePath);
     p.run();
 
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/PerPercentileAverage.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/PerPercentileAverage.java
index e46946f..c6fb382 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/PerPercentileAverage.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/PerPercentileAverage.java
@@ -54,42 +54,42 @@
     final Pipeline p = Pipeline.create(options);
 
     PCollection<Student> students = GenericSourceSink.read(p, inputFilePath)
-        .apply(ParDo.of(new DoFn<String, Student>() {
-          @ProcessElement
-          public void processElement(final ProcessContext c) {
-            String[] line = c.element().split(" ");
-            c.output(new Student(Integer.parseInt(line[0]), Integer.parseInt(line[1]), Integer.parseInt(line[2])));
-          }
-        }))
-        .setCoder(SerializableCoder.of(Student.class));
+      .apply(ParDo.of(new DoFn<String, Student>() {
+        @ProcessElement
+        public void processElement(final ProcessContext c) {
+          String[] line = c.element().split(" ");
+          c.output(new Student(Integer.parseInt(line[0]), Integer.parseInt(line[1]), Integer.parseInt(line[2])));
+        }
+      }))
+      .setCoder(SerializableCoder.of(Student.class));
 
     PCollectionList<Student> studentsByPercentile =
-        // Make sure that each partition contain at least one element.
-        // If there are empty PCollections, successive WriteFiles may fail.
-        students.apply(Partition.of(10, new Partition.PartitionFn<Student>() {
-          public int partitionFor(final Student student, final int numPartitions) {
-            return student.getPercentile() / numPartitions;
-          }
-        }));
+      // Make sure that each partition contain at least one element.
+      // If there are empty PCollections, successive WriteFiles may fail.
+      students.apply(Partition.of(10, new Partition.PartitionFn<Student>() {
+        public int partitionFor(final Student student, final int numPartitions) {
+          return student.getPercentile() / numPartitions;
+        }
+      }));
 
-    PCollection<String> [] results  = new PCollection[10];
+    PCollection<String>[] results = new PCollection[10];
     for (int i = 0; i < 10; i++) {
       results[i] = studentsByPercentile.get(i)
-          .apply(MapElements.via(new SimpleFunction<Student, KV<String, Integer>>() {
-            @Override
-            public KV<String, Integer> apply(final Student student) {
-              return KV.of("", student.getScore());
-            }
-          }))
-          .apply(GroupByKey.create())
-          .apply(MapElements.via(new SimpleFunction<KV<String, Iterable<Integer>>, String>() {
-            @Override
-            public String apply(final KV<String, Iterable<Integer>> kv) {
-              List<Integer> scores = Lists.newArrayList(kv.getValue());
-              final int sum = scores.stream().reduce(0, (Integer x, Integer y) -> x + y);
-              return scores.size() + " " + (double) sum / scores.size();
-            }
-          }));
+        .apply(MapElements.via(new SimpleFunction<Student, KV<String, Integer>>() {
+          @Override
+          public KV<String, Integer> apply(final Student student) {
+            return KV.of("", student.getScore());
+          }
+        }))
+        .apply(GroupByKey.create())
+        .apply(MapElements.via(new SimpleFunction<KV<String, Iterable<Integer>>, String>() {
+          @Override
+          public String apply(final KV<String, Iterable<Integer>> kv) {
+            List<Integer> scores = Lists.newArrayList(kv.getValue());
+            final int sum = scores.stream().reduce(0, (Integer x, Integer y) -> x + y);
+            return scores.size() + " " + (double) sum / scores.size();
+          }
+        }));
       GenericSourceSink.write(results[i], outputFilePath + "_" + i);
     }
 
@@ -106,9 +106,10 @@
 
     /**
      * Constructor.
-     * @param id student id.
+     *
+     * @param id         student id.
      * @param percentile student percentile.
-     * @param score student score.
+     * @param score      student score.
      */
     Student(final int id, final int percentile, final int score) {
       this.id = id;
@@ -118,6 +119,7 @@
 
     /**
      * Getter for student id.
+     *
      * @return id.
      */
     public int getId() {
@@ -126,6 +128,7 @@
 
     /**
      * Setter for student id.
+     *
      * @param id id.
      */
     public void setId(final int id) {
@@ -134,6 +137,7 @@
 
     /**
      * Getter for student percentile.
+     *
      * @return percentile.
      */
     public int getPercentile() {
@@ -142,6 +146,7 @@
 
     /**
      * Setter for student percentile.
+     *
      * @param percentile percentile.
      */
     public void setPercentile(final int percentile) {
@@ -150,6 +155,7 @@
 
     /**
      * Getter for student score.
+     *
      * @return score.
      */
     public int getScore() {
@@ -158,6 +164,7 @@
 
     /**
      * Setter for student score.
+     *
      * @param score score.
      */
     public void setScore(final int score) {
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/WindowedBroadcast.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/WindowedBroadcast.java
index d01a41e..36ce9d1 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/WindowedBroadcast.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/WindowedBroadcast.java
@@ -21,7 +21,9 @@
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.GenerateSequence;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.*;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.View;
 import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
 import org.apache.beam.sdk.transforms.windowing.Window;
 import org.apache.beam.sdk.values.PCollection;
@@ -51,8 +53,10 @@
       .withRate(2, Duration.standardSeconds(1))
       .withTimestampFn(num -> new Instant(num * 500))); // 0.5 second between subsequent elements
   }
+
   /**
    * Main function for the MR BEAM program.
+   *
    * @param args arguments.
    */
   public static void main(final String[] args) {
@@ -60,7 +64,7 @@
 
     final Window<Long> windowFn = Window
       .<Long>into(SlidingWindows.of(Duration.standardSeconds(2))
-      .every(Duration.standardSeconds(1)));
+        .every(Duration.standardSeconds(1)));
 
     final PipelineOptions options = NemoPipelineOptionsFactory.create();
     options.setJobName("WindowedBroadcast");
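
The indentation fix in this hunk also removes a misleading visual cue: `.every(...)` is a method of `SlidingWindows`, chained inside `Window.into(...)`, and the new continuation indent makes that nesting explicit. A compact sketch of the same window construction (a 2-second window sliding every 1 second):

```java
import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
import org.apache.beam.sdk.transforms.windowing.Window;
import org.joda.time.Duration;

final class WindowSketch {
  private WindowSketch() {
  }

  static Window<Long> slidingWindow() {
    // every() configures the SlidingWindows fn; into() wraps it as a transform.
    return Window.<Long>into(
      SlidingWindows.of(Duration.standardSeconds(2)).every(Duration.standardSeconds(1)));
  }
}
```
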
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/WindowedWordCount.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/WindowedWordCount.java
index eaa3f05..453c4d2 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/WindowedWordCount.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/WindowedWordCount.java
@@ -46,7 +46,7 @@
 
 
   /**
-   * @param p pipeline.
+   * @param p    pipeline.
    * @param args arguments.
    * @return source.
    */
@@ -91,8 +91,10 @@
       throw new RuntimeException("Unsupported input type: " + inputType);
     }
   }
+
   /**
    * Main function for the MR BEAM program.
+   *
    * @param args arguments.
    */
   public static void main(final String[] args) {
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/WordCount.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/WordCount.java
index 6ffaa2e..dcc5485 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/WordCount.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/WordCount.java
@@ -20,7 +20,9 @@
 
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.sdk.transforms.*;
+import org.apache.beam.sdk.transforms.MapElements;
+import org.apache.beam.sdk.transforms.SimpleFunction;
+import org.apache.beam.sdk.transforms.Sum;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
 
@@ -36,6 +38,7 @@
 
   /**
    * Main function for the MR BEAM program.
+   *
    * @param args arguments.
    */
   public static void main(final String[] args) {
@@ -46,22 +49,22 @@
 
     final Pipeline p = Pipeline.create(options);
     final PCollection<String> result = GenericSourceSink.read(p, inputFilePath)
-        .apply(MapElements.<String, KV<String, Long>>via(new SimpleFunction<String, KV<String, Long>>() {
-          @Override
-          public KV<String, Long> apply(final String line) {
-            final String[] words = line.split(" +");
-            final String documentId = words[0] + "#" + words[1];
-            final Long count = Long.parseLong(words[2]);
-            return KV.of(documentId, count);
-          }
-        }))
-        .apply(Sum.longsPerKey())
-        .apply(MapElements.<KV<String, Long>, String>via(new SimpleFunction<KV<String, Long>, String>() {
-          @Override
-          public String apply(final KV<String, Long> kv) {
-            return kv.getKey() + ": " + kv.getValue();
-          }
-        }));
+      .apply(MapElements.<String, KV<String, Long>>via(new SimpleFunction<String, KV<String, Long>>() {
+        @Override
+        public KV<String, Long> apply(final String line) {
+          final String[] words = line.split(" +");
+          final String documentId = words[0] + "#" + words[1];
+          final Long count = Long.parseLong(words[2]);
+          return KV.of(documentId, count);
+        }
+      }))
+      .apply(Sum.longsPerKey())
+      .apply(MapElements.<KV<String, Long>, String>via(new SimpleFunction<KV<String, Long>, String>() {
+        @Override
+        public String apply(final KV<String, Long> kv) {
+          return kv.getKey() + ": " + kv.getValue();
+        }
+      }));
     GenericSourceSink.write(result, outputFilePath);
     p.run();
   }
diff --git a/examples/beam/src/main/java/org/apache/nemo/examples/beam/WriteOneFilePerWindow.java b/examples/beam/src/main/java/org/apache/nemo/examples/beam/WriteOneFilePerWindow.java
index 8a38b78..2814e6b 100644
--- a/examples/beam/src/main/java/org/apache/nemo/examples/beam/WriteOneFilePerWindow.java
+++ b/examples/beam/src/main/java/org/apache/nemo/examples/beam/WriteOneFilePerWindow.java
@@ -17,6 +17,7 @@
  * under the License.
  */
 package org.apache.nemo.examples.beam;
+
 import org.apache.beam.sdk.io.FileBasedSink;
 import org.apache.beam.sdk.io.TextIO;
 import org.apache.beam.sdk.io.fs.ResolveOptions;
@@ -29,14 +30,14 @@
 import org.apache.beam.sdk.values.PDone;
 import org.joda.time.format.DateTimeFormatter;
 import org.joda.time.format.ISODateTimeFormat;
+
 import javax.annotation.Nullable;
 
 import static com.google.common.base.MoreObjects.firstNonNull;
 
 /**
-  * This class is brought from beam/examples/common/WriteOneFilePerWindow.java.
-  *
-  */
+ * This class is brought from beam/examples/common/WriteOneFilePerWindow.java.
+ */
 public final class WriteOneFilePerWindow extends PTransform<PCollection<String>, PDone> {
   // change from hourMinute to hourMinuteSecond
   private static final DateTimeFormatter FORMATTER = ISODateTimeFormat.hourMinuteSecond();
@@ -44,78 +45,84 @@
   @Nullable
   private Integer numShards;
 
-   /**
-    * Constructor.
-    * @param filenamePrefix file name prefix.
-    * @param numShards number of shards.
-    */
-   public WriteOneFilePerWindow(final String filenamePrefix, final Integer numShards) {
+  /**
+   * Constructor.
+   *
+   * @param filenamePrefix file name prefix.
+   * @param numShards      number of shards.
+   */
+  public WriteOneFilePerWindow(final String filenamePrefix, final Integer numShards) {
     this.filenamePrefix = filenamePrefix;
     this.numShards = numShards;
   }
-   @Override
+
+  @Override
   public PDone expand(final PCollection<String> input) {
     final ResourceId resource = FileBasedSink.convertToFileResourceIfPossible(filenamePrefix);
     TextIO.Write write =
-        TextIO.write()
-            .to(new PerWindowFiles(resource))
-            .withTempDirectory(resource.getCurrentDirectory())
-            .withWindowedWrites();
+      TextIO.write()
+        .to(new PerWindowFiles(resource))
+        .withTempDirectory(resource.getCurrentDirectory())
+        .withWindowedWrites();
     if (numShards != null) {
       write = write.withNumShards(numShards);
     }
     return input.apply(write);
   }
-   /**
+
+  /**
    * A {@link FileBasedSink.FilenamePolicy} produces a base file name for a write based on metadata about the data
    * being written. This always includes the shard number and the total number of shards. For
    * windowed writes, it also includes the window and pane index (a sequence number assigned to each
    * trigger firing).
    */
   public static final class PerWindowFiles extends FileBasedSink.FilenamePolicy {
-     private final ResourceId baseFilename;
+    private final ResourceId baseFilename;
 
-     /**
-      * Constructor.
-      * @param baseFilename base file name.
-      */
-     PerWindowFiles(final ResourceId baseFilename) {
+    /**
+     * Constructor.
+     *
+     * @param baseFilename base file name.
+     */
+    PerWindowFiles(final ResourceId baseFilename) {
       this.baseFilename = baseFilename;
     }
 
-     /**
-      * @param window interval window
-      * @return file name prefix.
-      */
-     String filenamePrefixForWindow(final IntervalWindow window) {
+    /**
+     * @param window interval window
+     * @return file name prefix.
+     */
+    String filenamePrefixForWindow(final IntervalWindow window) {
       final String prefix =
-          baseFilename.isDirectory() ? "" : firstNonNull(baseFilename.getFilename(), "");
+        baseFilename.isDirectory() ? "" : firstNonNull(baseFilename.getFilename(), "");
       return String.format(
-          "%s-%s-%s", prefix, FORMATTER.print(window.start()), FORMATTER.print(window.end()));
+        "%s-%s-%s", prefix, FORMATTER.print(window.start()), FORMATTER.print(window.end()));
     }
-     @Override
+
+    @Override
     public ResourceId windowedFilename(
-        final int shardNumber,
-        final int numShards,
-        final BoundedWindow window,
-        final PaneInfo paneInfo,
-        final FileBasedSink.OutputFileHints outputFileHints) {
+      final int shardNumber,
+      final int numShards,
+      final BoundedWindow window,
+      final PaneInfo paneInfo,
+      final FileBasedSink.OutputFileHints outputFileHints) {
       System.out.println("Windowd file name: " + window);
       final IntervalWindow intervalWindow = (IntervalWindow) window;
       final String filename =
-          String.format(
-              "%s-%s-of-%s%s",
-              filenamePrefixForWindow(intervalWindow),
-              shardNumber,
-              numShards,
-              outputFileHints.getSuggestedFilenameSuffix());
+        String.format(
+          "%s-%s-of-%s%s",
+          filenamePrefixForWindow(intervalWindow),
+          shardNumber,
+          numShards,
+          outputFileHints.getSuggestedFilenameSuffix());
       return baseFilename
-          .getCurrentDirectory()
-          .resolve(filename, ResolveOptions.StandardResolveOptions.RESOLVE_FILE);
+        .getCurrentDirectory()
+        .resolve(filename, ResolveOptions.StandardResolveOptions.RESOLVE_FILE);
     }
-     @Override
+
+    @Override
     public ResourceId unwindowedFilename(
-        final int shardNumber, final int numShards, final FileBasedSink.OutputFileHints outputFileHints) {
+      final int shardNumber, final int numShards, final FileBasedSink.OutputFileHints outputFileHints) {
       throw new UnsupportedOperationException("Unsupported.");
     }
   }
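
As the Javadoc above describes, `PerWindowFiles` derives each output file name from the window bounds plus the shard number, shard count, and any suggested suffix, so with five shards a window yields names like `out-<windowStart>-<windowEnd>-0-of-5` through `...-4-of-5`. A minimal usage sketch of `WriteOneFilePerWindow` (the pipeline setup, input glob, and one-minute window size are illustrative assumptions, not taken from this diff):

```java
package org.apache.nemo.examples.beam;

import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.windowing.FixedWindows;
import org.apache.beam.sdk.transforms.windowing.Window;
import org.apache.beam.sdk.values.PCollection;
import org.joda.time.Duration;

public final class WriteOneFilePerWindowExample {
  public static void main(final String[] args) {
    final Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());
    final PCollection<String> lines = p.apply(TextIO.read().from("inputs/*"));
    lines
      // Assign elements to one-minute fixed windows (illustrative choice).
      .apply(Window.into(FixedWindows.of(Duration.standardMinutes(1))))
      // Each window lands in its own set of files, named by
      // PerWindowFiles#windowedFilename above; 5 is the shard count,
      // and passing null instead lets the runner choose.
      .apply(new WriteOneFilePerWindow("out", 5));
    p.run().waitUntilFinish();
  }
}
```
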
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/AlternatingLeastSquareITCase.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/AlternatingLeastSquareITCase.java
index 095104a..8452bb5 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/AlternatingLeastSquareITCase.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/AlternatingLeastSquareITCase.java
@@ -22,9 +22,7 @@
 import org.apache.nemo.common.test.ArgBuilder;
 import org.apache.nemo.common.test.ExampleTestArgs;
 import org.apache.nemo.common.test.ExampleTestUtil;
-import org.apache.nemo.compiler.optimizer.policy.DefaultPolicy;
 import org.apache.nemo.examples.beam.policy.DefaultPolicyParallelismFive;
-import org.apache.nemo.examples.beam.policy.TransientResourcePolicyParallelismTen;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -52,8 +50,8 @@
   @Before
   public void setUp() throws Exception {
     builder = new ArgBuilder()
-        .addUserMain(AlternatingLeastSquare.class.getCanonicalName())
-        .addUserArgs(input, numFeatures, numIteration, lambda, output);
+      .addUserMain(AlternatingLeastSquare.class.getCanonicalName())
+      .addUserArgs(input, numFeatures, numIteration, lambda, output);
   }
 
   @After
@@ -65,13 +63,13 @@
     }
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testDefault() throws Exception {
     JobLauncher.main(builder
-        .addResourceJson(noPoisonResources)
-        .addJobId(AlternatingLeastSquareITCase.class.getSimpleName() + "_default")
-        .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addResourceJson(noPoisonResources)
+      .addJobId(AlternatingLeastSquareITCase.class.getSimpleName() + "_default")
+      .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 
   // TODO #137: Retry parent task(s) upon task INPUT_READ_FAILURE
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/BeamSimpleSumSQLITCase.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/BeamSimpleSumSQLITCase.java
index ccb1ca0..4e622aa 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/BeamSimpleSumSQLITCase.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/BeamSimpleSumSQLITCase.java
@@ -41,14 +41,14 @@
   private static final String outputFileName = "test_output_simplesql";
   private static final String expectedOutputFileName = "outputs/expected_output_simplesql";
   private static final String executorResourceFileName = ExampleTestArgs.getFileBasePath() + "executors/beam_test_executor_resources.json";
-  private static final String outputFilePath =  ExampleTestArgs.getFileBasePath() + outputFileName;
+  private static final String outputFilePath = ExampleTestArgs.getFileBasePath() + outputFileName;
 
   @Before
   public void setUp() throws Exception {
     builder = new ArgBuilder()
-        .addUserMain(SimpleSumSQL.class.getCanonicalName())
-        .addUserArgs(outputFilePath)
-        .addResourceJson(executorResourceFileName);
+      .addUserMain(SimpleSumSQL.class.getCanonicalName())
+      .addUserArgs(outputFilePath)
+      .addResourceJson(executorResourceFileName);
   }
 
   @After
@@ -60,11 +60,11 @@
     }
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void test() throws Exception {
     JobLauncher.main(builder
-        .addJobId(BeamSimpleSumSQLITCase.class.getSimpleName())
-        .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addJobId(BeamSimpleSumSQLITCase.class.getSimpleName())
+      .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 }
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/BroadcastITCase.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/BroadcastITCase.java
index 425eaa1..36094c9 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/BroadcastITCase.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/BroadcastITCase.java
@@ -43,15 +43,15 @@
   private static final String outputFileName = "test_output_broadcast";
   private static final String expectedOutputFileName = "outputs/expected_output_broadcast";
   private static final String executorResourceFileName = ExampleTestArgs.getFileBasePath() + "executors/beam_test_executor_resources.json";
-  private static final String inputFilePath =  ExampleTestArgs.getFileBasePath() + inputFileName;
-  private static final String outputFilePath =  ExampleTestArgs.getFileBasePath() + outputFileName;
+  private static final String inputFilePath = ExampleTestArgs.getFileBasePath() + inputFileName;
+  private static final String outputFilePath = ExampleTestArgs.getFileBasePath() + outputFileName;
 
   @Before
   public void setUp() throws Exception {
     builder = new ArgBuilder()
-        .addUserMain(Broadcast.class.getCanonicalName())
-        .addUserArgs(inputFilePath, outputFilePath)
-        .addResourceJson(executorResourceFileName);
+      .addUserMain(Broadcast.class.getCanonicalName())
+      .addUserArgs(inputFilePath, outputFilePath)
+      .addResourceJson(executorResourceFileName);
   }
 
   @After
@@ -63,19 +63,19 @@
     }
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void test() throws Exception {
     JobLauncher.main(builder
-        .addJobId(BroadcastITCase.class.getSimpleName())
-        .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addJobId(BroadcastITCase.class.getSimpleName())
+      .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testTransientResource() throws Exception {
     JobLauncher.main(builder
-        .addJobId(BroadcastITCase.class.getSimpleName() + "_transient")
-        .addOptimizationPolicy(TransientResourcePolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addJobId(BroadcastITCase.class.getSimpleName() + "_transient")
+      .addOptimizationPolicy(TransientResourcePolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 }
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/MultinomialLogisticRegressionITCase.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/MultinomialLogisticRegressionITCase.java
index 3d38e5f..68cf03d 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/MultinomialLogisticRegressionITCase.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/MultinomialLogisticRegressionITCase.java
@@ -21,7 +21,6 @@
 import org.apache.nemo.client.JobLauncher;
 import org.apache.nemo.common.test.ArgBuilder;
 import org.apache.nemo.common.test.ExampleTestArgs;
-import org.apache.nemo.compiler.optimizer.policy.DefaultPolicy;
 import org.apache.nemo.examples.beam.policy.DefaultPolicyParallelismFive;
 import org.junit.Before;
 import org.junit.Test;
@@ -43,7 +42,7 @@
     builder = new ArgBuilder();
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void test() throws Exception {
     final String input = ExampleTestArgs.getFileBasePath() + "inputs/test_input_mlr";
     final String numFeatures = "100";
@@ -51,11 +50,11 @@
     final String numIteration = "3";
 
     JobLauncher.main(builder
-        .addJobId(MultinomialLogisticRegressionITCase.class.getSimpleName())
-        .addUserMain(MultinomialLogisticRegression.class.getCanonicalName())
-        .addUserArgs(input, numFeatures, numClasses, numIteration)
-        .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
-        .addResourceJson(executorResourceFileName)
-        .build());
+      .addJobId(MultinomialLogisticRegressionITCase.class.getSimpleName())
+      .addUserMain(MultinomialLogisticRegression.class.getCanonicalName())
+      .addUserArgs(input, numFeatures, numClasses, numIteration)
+      .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
+      .addResourceJson(executorResourceFileName)
+      .build());
   }
 }
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/NetworkTraceAnalysisITCase.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/NetworkTraceAnalysisITCase.java
index 3c031b0..55f7ad5 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/NetworkTraceAnalysisITCase.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/NetworkTraceAnalysisITCase.java
@@ -24,8 +24,8 @@
 import org.apache.nemo.common.test.ExampleTestUtil;
 import org.apache.nemo.examples.beam.policy.DataSkewPolicyParallelismFive;
 import org.apache.nemo.examples.beam.policy.DefaultPolicyParallelismFive;
-import org.apache.nemo.examples.beam.policy.TransientResourcePolicyParallelismFive;
 import org.apache.nemo.examples.beam.policy.LargeShufflePolicyParallelismFive;
+import org.apache.nemo.examples.beam.policy.TransientResourcePolicyParallelismFive;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -43,16 +43,16 @@
   private static final String outputFileName = "test_output_network";
   private static final String expectedOutputFileName = "outputs/expected_output_network";
   private static final String executorResourceFileName = ExampleTestArgs.getFileBasePath() + "executors/beam_test_executor_resources.json";
-  private static final String inputFilePath0 =  ExampleTestArgs.getFileBasePath() + inputFileName0;
-  private static final String inputFilePath1 =  ExampleTestArgs.getFileBasePath() + inputFileName1;
-  private static final String outputFilePath =  ExampleTestArgs.getFileBasePath() + outputFileName;
+  private static final String inputFilePath0 = ExampleTestArgs.getFileBasePath() + inputFileName0;
+  private static final String inputFilePath1 = ExampleTestArgs.getFileBasePath() + inputFileName1;
+  private static final String outputFilePath = ExampleTestArgs.getFileBasePath() + outputFileName;
 
   @Before
   public void setUp() throws Exception {
     builder = new ArgBuilder()
-        .addResourceJson(executorResourceFileName)
-        .addUserMain(NetworkTraceAnalysis.class.getCanonicalName())
-        .addUserArgs(inputFilePath0, inputFilePath1, outputFilePath);
+      .addResourceJson(executorResourceFileName)
+      .addUserMain(NetworkTraceAnalysis.class.getCanonicalName())
+      .addUserArgs(inputFilePath0, inputFilePath1, outputFilePath);
   }
 
   @After
@@ -67,32 +67,33 @@
   @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void test() throws Exception {
     JobLauncher.main(builder
-        .addJobId(NetworkTraceAnalysisITCase.class.getSimpleName())
-        .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addJobId(NetworkTraceAnalysisITCase.class.getSimpleName())
+      .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testLargeShuffle() throws Exception {
     JobLauncher.main(builder
-        .addJobId(NetworkTraceAnalysisITCase.class.getSimpleName() + "_largeshuffle")
-        .addOptimizationPolicy(LargeShufflePolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addJobId(NetworkTraceAnalysisITCase.class.getSimpleName() + "_largeshuffle")
+      .addOptimizationPolicy(LargeShufflePolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testTransientResource() throws Exception {
     JobLauncher.main(builder
-        .addJobId(NetworkTraceAnalysisITCase.class.getSimpleName() + "_transient")
-        .addOptimizationPolicy(TransientResourcePolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addJobId(NetworkTraceAnalysisITCase.class.getSimpleName() + "_transient")
+      .addOptimizationPolicy(TransientResourcePolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 
   /**
    * Testing data skew dynamic optimization.
+   *
    * @throws Exception exception on the way.
    */
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testDataSkew() throws Exception {
     JobLauncher.main(builder
       .addJobId(NetworkTraceAnalysisITCase.class.getSimpleName() + "_skew")
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/PartitionWordsByLengthITCase.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/PartitionWordsByLengthITCase.java
index 0939105..0f0129b 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/PartitionWordsByLengthITCase.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/PartitionWordsByLengthITCase.java
@@ -43,14 +43,14 @@
   private static final String outputFileName = "test_output_tag";
   private static final String expectedOutputFileName = "outputs/expected_output_tag";
   private static final String executorResourceFileName = ExampleTestArgs.getFileBasePath() + "executors/beam_test_executor_resources.json";
-  private static final String inputFilePath =  ExampleTestArgs.getFileBasePath() + inputFileName;
-  private static final String outputFilePath =  ExampleTestArgs.getFileBasePath() + outputFileName;
+  private static final String inputFilePath = ExampleTestArgs.getFileBasePath() + inputFileName;
+  private static final String outputFilePath = ExampleTestArgs.getFileBasePath() + outputFileName;
 
   @Before
   public void setUp() throws Exception {
     builder = new ArgBuilder()
-        .addUserMain(PartitionWordsByLength.class.getCanonicalName())
-        .addUserArgs(inputFilePath, outputFilePath);
+      .addUserMain(PartitionWordsByLength.class.getCanonicalName())
+      .addUserArgs(inputFilePath, outputFilePath);
   }
 
   @After
@@ -65,21 +65,21 @@
     }
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testLargeShuffle() throws Exception {
     JobLauncher.main(builder
-        .addResourceJson(executorResourceFileName)
-        .addJobId(PartitionWordsByLengthITCase.class.getSimpleName() + "_largeshuffle")
-        .addOptimizationPolicy(LargeShufflePolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addResourceJson(executorResourceFileName)
+      .addJobId(PartitionWordsByLengthITCase.class.getSimpleName() + "_largeshuffle")
+      .addOptimizationPolicy(LargeShufflePolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void test() throws Exception {
     JobLauncher.main(builder
-        .addResourceJson(executorResourceFileName)
-        .addJobId(PartitionWordsByLengthITCase.class.getSimpleName())
-        .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addResourceJson(executorResourceFileName)
+      .addJobId(PartitionWordsByLengthITCase.class.getSimpleName())
+      .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 }
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/PerKeyMedianITCase.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/PerKeyMedianITCase.java
index 7ee9dd6..52bde1a 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/PerKeyMedianITCase.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/PerKeyMedianITCase.java
@@ -43,15 +43,15 @@
   private static final String outputFileName = "test_output_median";
   private static final String expectedOutputFileName = "outputs/expected_output_median";
   private static final String executorResourceFileName = ExampleTestArgs.getFileBasePath() + "executors/beam_test_executor_resources.json";
-  private static final String inputFilePath =  ExampleTestArgs.getFileBasePath() + inputFileName;
-  private static final String outputFilePath =  ExampleTestArgs.getFileBasePath() + outputFileName;
+  private static final String inputFilePath = ExampleTestArgs.getFileBasePath() + inputFileName;
+  private static final String outputFilePath = ExampleTestArgs.getFileBasePath() + outputFileName;
 
   @Before
   public void setUp() throws Exception {
     builder = new ArgBuilder()
-        .addResourceJson(executorResourceFileName)
-        .addUserMain(PerKeyMedian.class.getCanonicalName())
-        .addUserArgs(inputFilePath, outputFilePath);
+      .addResourceJson(executorResourceFileName)
+      .addUserMain(PerKeyMedian.class.getCanonicalName())
+      .addUserArgs(inputFilePath, outputFilePath);
   }
 
   @After
@@ -65,21 +65,23 @@
 
   /**
    * Testing data skew dynamic optimization.
+   *
    * @throws Exception exception on the way.
    */
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testDataSkew() throws Exception {
     JobLauncher.main(builder
-        .addJobId(PerKeyMedianITCase.class.getSimpleName())
-        .addOptimizationPolicy(DataSkewPolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addJobId(PerKeyMedianITCase.class.getSimpleName())
+      .addOptimizationPolicy(DataSkewPolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 
   /**
    * Testing large shuffle and data skew dynamic optimization.
+   *
    * @throws Exception exception on the way.
    */
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testLargeShuffleSamplingSkew() throws Exception {
     JobLauncher.main(builder
       .addJobId(PerKeyMedianITCase.class.getSimpleName() + "_LargeShuffleSamplingSkew")
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/PerPercentileAverageITCase.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/PerPercentileAverageITCase.java
index b43a939..c3482b7 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/PerPercentileAverageITCase.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/PerPercentileAverageITCase.java
@@ -42,8 +42,8 @@
   private static final String outputFileName = "test_output_partition";
   private static final String expectedOutputFileName = "outputs/expected_output_partition";
   private static final String executorResourceFileName = ExampleTestArgs.getFileBasePath() + "executors/beam_test_executor_resources.json";
-  private static final String inputFilePath =  ExampleTestArgs.getFileBasePath() + inputFileName;
-  private static final String outputFilePath =  ExampleTestArgs.getFileBasePath() + outputFileName;
+  private static final String inputFilePath = ExampleTestArgs.getFileBasePath() + inputFileName;
+  private static final String outputFilePath = ExampleTestArgs.getFileBasePath() + outputFileName;
 
   @Before
   public void setUp() throws Exception {
@@ -58,15 +58,15 @@
     try {
       for (int i = 0; i < 10; i++) {
         ExampleTestUtil.ensureOutputValidity(ExampleTestArgs.getFileBasePath(),
-            outputFileName + "_" + i,
-            expectedOutputFileName + "_" + i);
+          outputFileName + "_" + i,
+          expectedOutputFileName + "_" + i);
       }
     } finally {
       ExampleTestUtil.deleteOutputFile(ExampleTestArgs.getFileBasePath(), outputFileName);
     }
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void test() throws Exception {
     JobLauncher.main(builder
       .addJobId(PerPercentileAverage.class.getSimpleName())
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/WindowedBroadcastITCase.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/WindowedBroadcastITCase.java
index 5e2fba3..06a72f4 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/WindowedBroadcastITCase.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/WindowedBroadcastITCase.java
@@ -22,7 +22,6 @@
 import org.apache.nemo.common.test.ArgBuilder;
 import org.apache.nemo.common.test.ExampleTestUtil;
 import org.apache.nemo.examples.beam.policy.StreamingPolicyParallelismFive;
-import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
@@ -43,7 +42,7 @@
   private static final String expectedOutputFileName = "expected_output_windowed_broadcast";
   private static final String expectedSlidingWindowOutputFileName = "expected_output_sliding_windowed_broadcast";
   private static final String executorResourceFileName = fileBasePath + "beam_test_executor_resources.json";
-  private static final String outputFilePath =  fileBasePath + outputFileName;
+  private static final String outputFilePath = fileBasePath + outputFileName;
 
   // TODO #271: We currently disable this test because we cannot force close Nemo
   // @Test (timeout = TIMEOUT)
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/WindowedWordCountITCase.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/WindowedWordCountITCase.java
index 97dafe5..8f37760 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/WindowedWordCountITCase.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/WindowedWordCountITCase.java
@@ -23,7 +23,8 @@
 import org.apache.nemo.common.test.ExampleTestArgs;
 import org.apache.nemo.common.test.ExampleTestUtil;
 import org.apache.nemo.compiler.optimizer.policy.DefaultPolicy;
-import org.apache.nemo.examples.beam.policy.*;
+import org.apache.nemo.examples.beam.policy.DefaultPolicyParallelismFive;
+import org.apache.nemo.examples.beam.policy.StreamingPolicyParallelismFive;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.powermock.core.classloader.annotations.PrepareForTest;
@@ -46,20 +47,20 @@
   private static final String expectedOutputFileName = "outputs/expected_output_windowed_wordcount";
   private static final String expectedSlidingWindowOutputFileName = "outputs/expected_output_sliding_windowed_wordcount";
   private static final String executorResourceFileName = ExampleTestArgs.getFileBasePath() + "executors/beam_test_executor_resources.json";
-  private static final String inputFilePath =  ExampleTestArgs.getFileBasePath() + inputFileName;
-  private static final String outputFilePath =  ExampleTestArgs.getFileBasePath() + outputFileName;
+  private static final String inputFilePath = ExampleTestArgs.getFileBasePath() + inputFileName;
+  private static final String outputFilePath = ExampleTestArgs.getFileBasePath() + outputFileName;
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testBatchFixedWindow() throws Exception {
     builder = new ArgBuilder()
       .addUserMain(WindowedWordCount.class.getCanonicalName())
       .addUserArgs(outputFilePath, "fixed", INPUT_TYPE_BOUNDED, inputFilePath);
 
     JobLauncher.main(builder
-        .addResourceJson(executorResourceFileName)
-        .addJobId(WindowedWordCountITCase.class.getSimpleName() + "testBatchFixedWindow")
-        .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addResourceJson(executorResourceFileName)
+      .addJobId(WindowedWordCountITCase.class.getSimpleName() + "testBatchFixedWindow")
+      .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
+      .build());
 
     try {
       ExampleTestUtil.ensureOutputValidity(ExampleTestArgs.getFileBasePath(), outputFileName, expectedOutputFileName);
@@ -69,7 +70,7 @@
   }
 
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testBatchSlidingWindow() throws Exception {
     builder = new ArgBuilder()
       .addUserMain(WindowedWordCount.class.getCanonicalName())
@@ -88,7 +89,7 @@
     }
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testStreamingSchedulerAndPipeFixedWindow() throws Exception {
     builder = new ArgBuilder()
       .addScheduler("org.apache.nemo.runtime.master.scheduler.StreamingScheduler")
@@ -109,7 +110,7 @@
   }
 
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testStreamingSchedulerAndPipeSlidingWindow() throws Exception {
     builder = new ArgBuilder()
       .addScheduler("org.apache.nemo.runtime.master.scheduler.StreamingScheduler")
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/WordCountITCase.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/WordCountITCase.java
index e31a8c5..69c6ffc 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/WordCountITCase.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/WordCountITCase.java
@@ -44,14 +44,14 @@
   private static final String expectedOutputFileName = "outputs/expected_output_wordcount";
   private static final String executorResourceFileName = ExampleTestArgs.getFileBasePath() + "executors/beam_test_executor_resources.json";
   private static final String oneExecutorResourceFileName = ExampleTestArgs.getFileBasePath() + "executors/beam_test_one_executor_resources.json";
-  private static final String inputFilePath =  ExampleTestArgs.getFileBasePath() + inputFileName;
-  private static final String outputFilePath =  ExampleTestArgs.getFileBasePath() + outputFileName;
+  private static final String inputFilePath = ExampleTestArgs.getFileBasePath() + inputFileName;
+  private static final String outputFilePath = ExampleTestArgs.getFileBasePath() + outputFileName;
 
   @Before
   public void setUp() throws Exception {
     builder = new ArgBuilder()
-        .addUserMain(WordCount.class.getCanonicalName())
-        .addUserArgs(inputFilePath, outputFilePath);
+      .addUserMain(WordCount.class.getCanonicalName())
+      .addUserArgs(inputFilePath, outputFilePath);
   }
 
   @After
@@ -63,62 +63,62 @@
     }
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void test() throws Exception {
     JobLauncher.main(builder
-        .addResourceJson(executorResourceFileName)
-        .addJobId(WordCountITCase.class.getSimpleName())
-        .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addResourceJson(executorResourceFileName)
+      .addJobId(WordCountITCase.class.getSimpleName())
+      .addOptimizationPolicy(DefaultPolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testLargeShuffle() throws Exception {
     JobLauncher.main(builder
-        .addResourceJson(executorResourceFileName)
-        .addJobId(WordCountITCase.class.getSimpleName() + "_largeShuffle")
-        .addOptimizationPolicy(LargeShufflePolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addResourceJson(executorResourceFileName)
+      .addJobId(WordCountITCase.class.getSimpleName() + "_largeShuffle")
+      .addOptimizationPolicy(LargeShufflePolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testLargeShuffleInOneExecutor() throws Exception {
     JobLauncher.main(builder
-        .addResourceJson(oneExecutorResourceFileName)
-        .addJobId(WordCountITCase.class.getSimpleName() + "_largeshuffleInOneExecutor")
-        .addOptimizationPolicy(LargeShufflePolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addResourceJson(oneExecutorResourceFileName)
+      .addJobId(WordCountITCase.class.getSimpleName() + "_largeshuffleInOneExecutor")
+      .addOptimizationPolicy(LargeShufflePolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testConditionalLargeShuffle() throws Exception {
     JobLauncher.main(builder
-        .addResourceJson(executorResourceFileName)
-        .addJobId(WordCountITCase.class.getSimpleName() + "_conditionalLargeShuffle")
-        .addOptimizationPolicy(ConditionalLargeShufflePolicy.class.getCanonicalName())
-        .build());
+      .addResourceJson(executorResourceFileName)
+      .addJobId(WordCountITCase.class.getSimpleName() + "_conditionalLargeShuffle")
+      .addOptimizationPolicy(ConditionalLargeShufflePolicy.class.getCanonicalName())
+      .build());
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testTransientResource() throws Exception {
     JobLauncher.main(builder
-        .addResourceJson(executorResourceFileName)
-        .addJobId(WordCountITCase.class.getSimpleName() + "_transient")
-        .addOptimizationPolicy(TransientResourcePolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addResourceJson(executorResourceFileName)
+      .addJobId(WordCountITCase.class.getSimpleName() + "_transient")
+      .addOptimizationPolicy(TransientResourcePolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testClonedScheduling() throws Exception {
     JobLauncher.main(builder
-        .addResourceJson(executorResourceFileName)
-        .addJobId(WordCountITCase.class.getSimpleName() + "_clonedscheduling")
-        .addMaxTaskAttempt(Integer.MAX_VALUE)
-        .addOptimizationPolicy(UpfrontSchedulingPolicyParallelismFive.class.getCanonicalName())
-        .build());
+      .addResourceJson(executorResourceFileName)
+      .addJobId(WordCountITCase.class.getSimpleName() + "_clonedscheduling")
+      .addMaxTaskAttempt(Integer.MAX_VALUE)
+      .addOptimizationPolicy(UpfrontSchedulingPolicyParallelismFive.class.getCanonicalName())
+      .build());
   }
 
-  @Test (timeout = ExampleTestArgs.TIMEOUT)
+  @Test(timeout = ExampleTestArgs.TIMEOUT)
   public void testSpeculativeExecution() throws Exception {
     JobLauncher.main(builder
       .addResourceJson(executorResourceFileName)
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/AggressiveSpeculativeCloningPolicyParallelismFive.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/AggressiveSpeculativeCloningPolicyParallelismFive.java
index 1913235..0bc7f3d 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/AggressiveSpeculativeCloningPolicyParallelismFive.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/AggressiveSpeculativeCloningPolicyParallelismFive.java
@@ -33,17 +33,20 @@
  */
 public final class AggressiveSpeculativeCloningPolicyParallelismFive implements Policy {
   private final Policy policy;
+
   public AggressiveSpeculativeCloningPolicyParallelismFive() {
     final List<CompileTimePass> overwritingPasses = DefaultPolicy.BUILDER.getCompileTimePasses();
     overwritingPasses.add(new AggressiveSpeculativeCloningPass()); // CLONING!
     this.policy = new PolicyImpl(
-        PolicyTestUtil.overwriteParallelism(5, overwritingPasses),
-        DefaultPolicy.BUILDER.getRunTimePasses());
+      PolicyTestUtil.overwriteParallelism(5, overwritingPasses),
+      DefaultPolicy.BUILDER.getRunTimePasses());
   }
+
   @Override
   public IRDAG runCompileTimeOptimization(final IRDAG dag, final String dagDirectory) {
     return this.policy.runCompileTimeOptimization(dag, dagDirectory);
   }
+
   @Override
   public IRDAG runRunTimeOptimizations(final IRDAG dag, final Message<?> message) {
     return this.policy.runRunTimeOptimizations(dag, message);
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/DataSkewPolicyParallelismFive.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/DataSkewPolicyParallelismFive.java
index 7e80937..9702077 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/DataSkewPolicyParallelismFive.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/DataSkewPolicyParallelismFive.java
@@ -32,8 +32,8 @@
 
   public DataSkewPolicyParallelismFive() {
     this.policy = new PolicyImpl(
-        PolicyTestUtil.overwriteParallelism(5, DataSkewPolicy.BUILDER.getCompileTimePasses()),
-        DataSkewPolicy.BUILDER.getRunTimePasses());
+      PolicyTestUtil.overwriteParallelism(5, DataSkewPolicy.BUILDER.getCompileTimePasses()),
+      DataSkewPolicy.BUILDER.getRunTimePasses());
   }
 
   @Override
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/DefaultPolicyParallelismFive.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/DefaultPolicyParallelismFive.java
index 29cab44..e3892dc 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/DefaultPolicyParallelismFive.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/DefaultPolicyParallelismFive.java
@@ -32,8 +32,8 @@
 
   public DefaultPolicyParallelismFive() {
     this.policy = new PolicyImpl(
-        PolicyTestUtil.overwriteParallelism(5, DefaultPolicy.BUILDER.getCompileTimePasses()),
-        DefaultPolicy.BUILDER.getRunTimePasses());
+      PolicyTestUtil.overwriteParallelism(5, DefaultPolicy.BUILDER.getCompileTimePasses()),
+      DefaultPolicy.BUILDER.getRunTimePasses());
   }
 
   @Override
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/DisaggregationPolicyParallelismFive.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/DisaggregationPolicyParallelismFive.java
index fdacfa6..64bce69 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/DisaggregationPolicyParallelismFive.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/DisaggregationPolicyParallelismFive.java
@@ -32,9 +32,9 @@
 
   public DisaggregationPolicyParallelismFive() {
     this.policy = new PolicyImpl(
-        PolicyTestUtil.overwriteParallelism(5,
-            DisaggregationPolicy.BUILDER.getCompileTimePasses()),
-        DisaggregationPolicy.BUILDER.getRunTimePasses());
+      PolicyTestUtil.overwriteParallelism(5,
+        DisaggregationPolicy.BUILDER.getCompileTimePasses()),
+      DisaggregationPolicy.BUILDER.getRunTimePasses());
   }
 
   @Override
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/LargeShufflePolicyParallelismFive.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/LargeShufflePolicyParallelismFive.java
index 679c6f1..b04821c 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/LargeShufflePolicyParallelismFive.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/LargeShufflePolicyParallelismFive.java
@@ -32,8 +32,8 @@
 
   public LargeShufflePolicyParallelismFive() {
     this.policy = new PolicyImpl(
-        PolicyTestUtil.overwriteParallelism(5, LargeShufflePolicy.BUILDER.getCompileTimePasses()),
-        LargeShufflePolicy.BUILDER.getRunTimePasses());
+      PolicyTestUtil.overwriteParallelism(5, LargeShufflePolicy.BUILDER.getCompileTimePasses()),
+      LargeShufflePolicy.BUILDER.getRunTimePasses());
   }
 
   @Override
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/PolicyTestUtil.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/PolicyTestUtil.java
index 3e090a2..60c3aec 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/PolicyTestUtil.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/PolicyTestUtil.java
@@ -30,15 +30,15 @@
   /**
    * Overwrite the parallelism of existing policy.
    *
-   * @param desiredSourceParallelism       the desired source parallelism to set.
-   * @param compileTimePassesToOverwrite   the list of compile time passes to overwrite.
+   * @param desiredSourceParallelism     the desired source parallelism to set.
+   * @param compileTimePassesToOverwrite the list of compile time passes to overwrite.
    * @return the overwritten policy.
    */
   public static List<CompileTimePass> overwriteParallelism(final int desiredSourceParallelism,
-                                            final List<CompileTimePass> compileTimePassesToOverwrite) {
+                                                           final List<CompileTimePass> compileTimePassesToOverwrite) {
     final int parallelismPassIdx = compileTimePassesToOverwrite.indexOf(new DefaultParallelismPass());
     compileTimePassesToOverwrite.set(parallelismPassIdx,
-        new DefaultParallelismPass(desiredSourceParallelism, 2));
+      new DefaultParallelismPass(desiredSourceParallelism, 2));
     return compileTimePassesToOverwrite;
   }
 }
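
The test policies above all follow the same pattern: take a builder's compile-time passes, let `overwriteParallelism` swap the `DefaultParallelismPass` for one with the desired source parallelism, and wrap the result in a `PolicyImpl` together with the builder's run-time passes. A sketch of a hypothetical parallelism-three variant (this class does not exist in the diff; it only mirrors the classes shown):

```java
package org.apache.nemo.examples.beam.policy;

import org.apache.nemo.common.ir.IRDAG;
import org.apache.nemo.compiler.optimizer.pass.runtime.Message;
import org.apache.nemo.compiler.optimizer.policy.DefaultPolicy;
import org.apache.nemo.compiler.optimizer.policy.Policy;
import org.apache.nemo.compiler.optimizer.policy.PolicyImpl;

/**
 * A default policy with fixed parallelism 3 for tests (hypothetical example).
 */
public final class DefaultPolicyParallelismThree implements Policy {
  private final Policy policy;

  public DefaultPolicyParallelismThree() {
    // overwriteParallelism replaces the DefaultParallelismPass in the list
    // with one forcing source parallelism 3, as defined in PolicyTestUtil above.
    this.policy = new PolicyImpl(
      PolicyTestUtil.overwriteParallelism(3, DefaultPolicy.BUILDER.getCompileTimePasses()),
      DefaultPolicy.BUILDER.getRunTimePasses());
  }

  @Override
  public IRDAG runCompileTimeOptimization(final IRDAG dag, final String dagDirectory) {
    return this.policy.runCompileTimeOptimization(dag, dagDirectory);
  }

  @Override
  public IRDAG runRunTimeOptimizations(final IRDAG dag, final Message<?> message) {
    return this.policy.runRunTimeOptimizations(dag, message);
  }
}
```
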
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/TransientResourcePolicyParallelismFive.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/TransientResourcePolicyParallelismFive.java
index f3a2915..31aabc5 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/TransientResourcePolicyParallelismFive.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/TransientResourcePolicyParallelismFive.java
@@ -20,9 +20,9 @@
 
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.compiler.optimizer.pass.runtime.Message;
+import org.apache.nemo.compiler.optimizer.policy.Policy;
 import org.apache.nemo.compiler.optimizer.policy.PolicyImpl;
 import org.apache.nemo.compiler.optimizer.policy.TransientResourcePolicy;
-import org.apache.nemo.compiler.optimizer.policy.Policy;
 
 /**
  * A transient resource policy with fixed parallelism 5 for tests.
@@ -32,9 +32,9 @@
 
   public TransientResourcePolicyParallelismFive() {
     this.policy = new PolicyImpl(
-        PolicyTestUtil.overwriteParallelism(5,
-            TransientResourcePolicy.BUILDER.getCompileTimePasses()),
-        TransientResourcePolicy.BUILDER.getRunTimePasses());
+      PolicyTestUtil.overwriteParallelism(5,
+        TransientResourcePolicy.BUILDER.getCompileTimePasses()),
+      TransientResourcePolicy.BUILDER.getRunTimePasses());
   }
 
   @Override
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/TransientResourcePolicyParallelismTen.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/TransientResourcePolicyParallelismTen.java
index 51065be..62fa4df 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/TransientResourcePolicyParallelismTen.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/TransientResourcePolicyParallelismTen.java
@@ -20,9 +20,9 @@
 
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.compiler.optimizer.pass.runtime.Message;
+import org.apache.nemo.compiler.optimizer.policy.Policy;
 import org.apache.nemo.compiler.optimizer.policy.PolicyImpl;
 import org.apache.nemo.compiler.optimizer.policy.TransientResourcePolicy;
-import org.apache.nemo.compiler.optimizer.policy.Policy;
 
 /**
  * A transient resource policy with fixed parallelism 10 for tests.
@@ -32,9 +32,9 @@
 
   public TransientResourcePolicyParallelismTen() {
     this.policy = new PolicyImpl(
-        PolicyTestUtil.overwriteParallelism(10,
-            TransientResourcePolicy.BUILDER.getCompileTimePasses()),
-        TransientResourcePolicy.BUILDER.getRunTimePasses());
+      PolicyTestUtil.overwriteParallelism(10,
+        TransientResourcePolicy.BUILDER.getCompileTimePasses()),
+      TransientResourcePolicy.BUILDER.getRunTimePasses());
   }
 
   @Override
diff --git a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/UpfrontSchedulingPolicyParallelismFive.java b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/UpfrontSchedulingPolicyParallelismFive.java
index d70d51b..6846d63 100644
--- a/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/UpfrontSchedulingPolicyParallelismFive.java
+++ b/examples/beam/src/test/java/org/apache/nemo/examples/beam/policy/UpfrontSchedulingPolicyParallelismFive.java
@@ -25,6 +25,7 @@
 import org.apache.nemo.compiler.optimizer.policy.DefaultPolicy;
 import org.apache.nemo.compiler.optimizer.policy.Policy;
 import org.apache.nemo.compiler.optimizer.policy.PolicyImpl;
+
 import java.util.List;
 
 /**
@@ -32,17 +33,20 @@
  */
 public final class UpfrontSchedulingPolicyParallelismFive implements Policy {
   private final Policy policy;
+
   public UpfrontSchedulingPolicyParallelismFive() {
     final List<CompileTimePass> overwritingPasses = DefaultPolicy.BUILDER.getCompileTimePasses();
     overwritingPasses.add(new UpfrontCloningPass()); // CLONING!
     this.policy = new PolicyImpl(
-        PolicyTestUtil.overwriteParallelism(5, overwritingPasses),
-        DefaultPolicy.BUILDER.getRunTimePasses());
+      PolicyTestUtil.overwriteParallelism(5, overwritingPasses),
+      DefaultPolicy.BUILDER.getRunTimePasses());
   }
+
   @Override
   public IRDAG runCompileTimeOptimization(final IRDAG dag, final String dagDirectory) {
     return this.policy.runCompileTimeOptimization(dag, dagDirectory);
   }
+
   @Override
   public IRDAG runRunTimeOptimizations(final IRDAG dag, final Message<?> message) {
     return this.policy.runRunTimeOptimizations(dag, message);
diff --git a/examples/pom.xml b/examples/pom.xml
index 8be9bd1..f0c478b 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -17,7 +17,8 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <parent>
     <artifactId>nemo-project</artifactId>
     <groupId>org.apache.nemo</groupId>
diff --git a/examples/spark/pom.xml b/examples/spark/pom.xml
index 3db745a..d3e122c 100644
--- a/examples/spark/pom.xml
+++ b/examples/spark/pom.xml
@@ -17,139 +17,140 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <artifactId>nemo-examples</artifactId>
-        <groupId>org.apache.nemo</groupId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../</relativePath>
-    </parent>
+  <parent>
+    <artifactId>nemo-examples</artifactId>
+    <groupId>org.apache.nemo</groupId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../</relativePath>
+  </parent>
 
-    <artifactId>nemo-examples-spark</artifactId>
-    <name>Nemo Examples: Spark</name>
+  <artifactId>nemo-examples-spark</artifactId>
+  <name>Nemo Examples: Spark</name>
 
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-compiler-frontend-spark</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.github.fommil.netlib</groupId>
-            <artifactId>all</artifactId>
-            <version>${netlib.version}</version>
-            <type>pom</type>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-core</artifactId>
-            <version>${hadoop.version}</version>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <version>${hadoop.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-api</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-log4j12</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-    </dependencies>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-compiler-frontend-spark</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.github.fommil.netlib</groupId>
+      <artifactId>all</artifactId>
+      <version>${netlib.version}</version>
+      <type>pom</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+  </dependencies>
 
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-deploy-plugin</artifactId>
-                <version>2.8.2</version>
-                <configuration>
-                    <skip>true</skip>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-                <version>3.0.0</version>
-                <executions>
-                    <execution>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>shade</goal>
-                        </goals>
-                        <configuration>
-                            <outputFile>
-                                ${project.build.directory}/${project.artifactId}-${project.version}-shaded.jar
-                            </outputFile>
-                            <transformers>
-                                <!-- Required for using beam-hadoop: See https://stackoverflow.com/questions/44365545
-                                -->
-                                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer" />
-                            </transformers>
-                            <filters>
-                                <filter>
-                                    <artifact>*:*</artifact>
-                                    <excludes>
-                                        <exclude>META-INF/*.SF</exclude>
-                                        <exclude>META-INF/*.DSA</exclude>
-                                        <exclude>META-INF/*.RSA</exclude>
-                                    </excludes>
-                                </filter>
-                            </filters>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>net.alchim31.maven</groupId>
-                <artifactId>scala-maven-plugin</artifactId>
-                <version>3.3.1</version>
-                <configuration>
-                    <recompileMode>incremental</recompileMode>
-                    <javacArgs>
-                        <javacArg>-Xlint:unchecked</javacArg>
-                        <javacArg>-Xlint:deprecation</javacArg>
-                    </javacArgs>
-                </configuration>
-                <executions>
-                    <execution>
-                        <id>scala-compile-first</id>
-                        <phase>process-resources</phase>
-                        <goals>
-                            <goal>add-source</goal>
-                            <goal>compile</goal>
-                        </goals>
-                    </execution>
-                    <execution>
-                        <id>scala-test-compile</id>
-                        <phase>process-test-resources</phase>
-                        <goals>
-                            <goal>testCompile</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <version>2.0.2</version>
-                <executions>
-                    <execution>
-                        <phase>compile</phase>
-                        <goals>
-                            <goal>compile</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-deploy-plugin</artifactId>
+        <version>2.8.2</version>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>3.0.0</version>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <outputFile>
+                ${project.build.directory}/${project.artifactId}-${project.version}-shaded.jar
+              </outputFile>
+              <transformers>
+                <!-- Required for using beam-hadoop: See https://stackoverflow.com/questions/44365545
+                -->
+                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"/>
+              </transformers>
+              <filters>
+                <filter>
+                  <artifact>*:*</artifact>
+                  <excludes>
+                    <exclude>META-INF/*.SF</exclude>
+                    <exclude>META-INF/*.DSA</exclude>
+                    <exclude>META-INF/*.RSA</exclude>
+                  </excludes>
+                </filter>
+              </filters>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>net.alchim31.maven</groupId>
+        <artifactId>scala-maven-plugin</artifactId>
+        <version>3.3.1</version>
+        <configuration>
+          <recompileMode>incremental</recompileMode>
+          <javacArgs>
+            <javacArg>-Xlint:unchecked</javacArg>
+            <javacArg>-Xlint:deprecation</javacArg>
+          </javacArgs>
+        </configuration>
+        <executions>
+          <execution>
+            <id>scala-compile-first</id>
+            <phase>process-resources</phase>
+            <goals>
+              <goal>add-source</goal>
+              <goal>compile</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>scala-test-compile</id>
+            <phase>process-test-resources</phase>
+            <goals>
+              <goal>testCompile</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <executions>
+          <execution>
+            <phase>compile</phase>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
 </project>
diff --git a/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaMapReduce.java b/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaMapReduce.java
index 23e83a6..2e0ba47 100644
--- a/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaMapReduce.java
+++ b/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaMapReduce.java
@@ -18,9 +18,9 @@
  */
 package org.apache.nemo.examples.spark;
 
+import org.apache.nemo.compiler.frontend.spark.core.JavaSparkContext;
 import org.apache.nemo.compiler.frontend.spark.core.rdd.JavaPairRDD;
 import org.apache.nemo.compiler.frontend.spark.core.rdd.JavaRDD;
-import org.apache.nemo.compiler.frontend.spark.core.JavaSparkContext;
 import org.apache.nemo.compiler.frontend.spark.sql.SparkSession;
 import scala.Tuple2;
 
@@ -37,6 +37,7 @@
 
   /**
    * Main method.
+   *
    * @param args arguments.
    * @throws Exception exceptions.
    */
@@ -49,13 +50,13 @@
     final boolean yarn = args.length > 3 && Boolean.parseBoolean(args[3]);
 
     final SparkSession.Builder sparkBuilder = SparkSession
-        .builder()
-        .appName("JavaMapReduce");
+      .builder()
+      .appName("JavaMapReduce");
     if (yarn) {
       sparkBuilder
-          .config("mapreduce.input.fileinputformat.input.dir.recursive", "true")
-          .master("yarn")
-          .config("spark.submit.deployMode", "cluster");
+        .config("mapreduce.input.fileinputformat.input.dir.recursive", "true")
+        .master("yarn")
+        .config("spark.submit.deployMode", "cluster");
     }
     final SparkSession spark = sparkBuilder.getOrCreate();
 
@@ -65,15 +66,15 @@
     final JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
     final JavaRDD<String> data = jsc.textFile(input, parallelism);
     final JavaPairRDD<String, Long> documentToCount = data
-        .mapToPair(line -> {
-          final String[] words = line.split(" +");
-          final String documentId = words[0] + "#" + words[1];
-          final long count = Long.parseLong(words[2]);
-          return new Tuple2<>(documentId, count);
-        });
+      .mapToPair(line -> {
+        final String[] words = line.split(" +");
+        final String documentId = words[0] + "#" + words[1];
+        final long count = Long.parseLong(words[2]);
+        return new Tuple2<>(documentId, count);
+      });
     final JavaRDD<String> documentToSum = documentToCount
-        .reduceByKey((i1, i2) -> i1 + i2)
-        .map(t -> t._1() + ": " + t._2());
+      .reduceByKey((i1, i2) -> i1 + i2)
+      .map(t -> t._1() + ": " + t._2());
     documentToSum.saveAsTextFile(output);
 
     // DONE
diff --git a/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaSparkPi.java b/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaSparkPi.java
index f7684e5..0e716ea 100644
--- a/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaSparkPi.java
+++ b/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaSparkPi.java
@@ -18,8 +18,8 @@
  */
 package org.apache.nemo.examples.spark;
 
-import org.apache.nemo.compiler.frontend.spark.core.rdd.JavaRDD;
 import org.apache.nemo.compiler.frontend.spark.core.JavaSparkContext;
+import org.apache.nemo.compiler.frontend.spark.core.rdd.JavaRDD;
 import org.apache.nemo.compiler.frontend.spark.sql.SparkSession;
 
 import java.util.ArrayList;
@@ -39,14 +39,15 @@
 
   /**
    * Main method.
+   *
    * @param args arguments.
    * @throws Exception exceptions.
    */
   public static void main(final String[] args) throws Exception {
     SparkSession spark = SparkSession
-        .builder()
-        .appName("JavaSparkPi")
-        .getOrCreate();
+      .builder()
+      .appName("JavaSparkPi")
+      .getOrCreate();
 
     final JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
 
diff --git a/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaWordAndLineCount.java b/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaWordAndLineCount.java
index e9e85a9..8263331 100644
--- a/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaWordAndLineCount.java
+++ b/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaWordAndLineCount.java
@@ -44,6 +44,7 @@
 
   /**
    * Main method.
+   *
    * @param args arguments.
    * @throws Exception exceptions.
    */
@@ -55,9 +56,9 @@
     }
 
     SparkSession spark = SparkSession
-        .builder()
-        .appName("JavaWordAndLineCount")
-        .getOrCreate();
+      .builder()
+      .appName("JavaWordAndLineCount")
+      .getOrCreate();
 
     JavaRDD<String> lines = spark.read().textFile(args[0]).javaRDD();
 
diff --git a/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaWordCount.java b/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaWordCount.java
index 5d93876..ce9cadd 100644
--- a/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaWordCount.java
+++ b/examples/spark/src/main/java/org/apache/nemo/examples/spark/JavaWordCount.java
@@ -44,6 +44,7 @@
 
   /**
    * Main method.
+   *
    * @param args arguments.
    * @throws Exception exceptions.
    */
@@ -55,9 +56,9 @@
     }
 
     SparkSession spark = SparkSession
-        .builder()
-        .appName("JavaWordCount")
-        .getOrCreate();
+      .builder()
+      .appName("JavaWordCount")
+      .getOrCreate();
 
     JavaRDD<String> lines = spark.read().textFile(args[0]).javaRDD();
 
diff --git a/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaSparkSQLExample.java b/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaSparkSQLExample.java
index cf6bc96..5e9099b 100644
--- a/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaSparkSQLExample.java
+++ b/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaSparkSQLExample.java
@@ -18,12 +18,6 @@
  */
 package org.apache.nemo.examples.spark.sql;
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Arrays;
-import java.util.Collections;
-import java.io.Serializable;
-
 import org.apache.nemo.compiler.frontend.spark.core.rdd.JavaRDD;
 import org.apache.nemo.compiler.frontend.spark.sql.Dataset;
 import org.apache.nemo.compiler.frontend.spark.sql.SparkSession;
@@ -34,12 +28,19 @@
 import org.apache.spark.sql.types.StructField;
 import org.apache.spark.sql.types.StructType;
 
-// col("...") is preferable to df.col("...")
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
 import static org.apache.spark.sql.functions.col;
 
+// col("...") is preferable to df.col("...")
+
 /**
  * Java Spark SQL Example program.
- *
+ * <p>
  * This code has been copied from the Apache Spark (https://github.com/apache/spark) to demonstrate a spark example.
  */
 public final class JavaSparkSQLExample {
@@ -59,6 +60,7 @@
 
     /**
      * Getter.
+     *
      * @return name.
      */
     public String getName() {
@@ -67,6 +69,7 @@
 
     /**
      * Setter.
+     *
      * @param name name.
      */
     public void setName(final String name) {
@@ -75,6 +78,7 @@
 
     /**
      * Getter.
+     *
      * @return age.
      */
     public int getAge() {
@@ -83,6 +87,7 @@
 
     /**
      * Setter.
+     *
      * @param age age.
      */
     public void setAge(final int age) {
@@ -92,15 +97,16 @@
 
   /**
    * Main function.
+   *
    * @param args arguments.
    * @throws AnalysisException Exception.
    */
   public static void main(final String[] args) throws AnalysisException {
     SparkSession spark = SparkSession
-        .builder()
-        .appName("Java Spark SQL basic example")
-        .config("spark.some.config.option", "some-value")
-        .getOrCreate();
+      .builder()
+      .appName("Java Spark SQL basic example")
+      .config("spark.some.config.option", "some-value")
+      .getOrCreate();
 
     runBasicDataFrameExample(spark, args[0]);
     runDatasetCreationExample(spark, args[0]);
@@ -112,12 +118,13 @@
 
   /**
    * Function to run basic data frame example.
-   * @param spark spark session.
+   *
+   * @param spark      spark session.
    * @param peopleJson path to people json file.
    * @throws AnalysisException exception.
    */
   private static void runBasicDataFrameExample(final SparkSession spark, final String peopleJson)
-      throws AnalysisException {
+    throws AnalysisException {
     Dataset<Row> df = spark.read().json(peopleJson);
 
     // Displays the content of the DataFrame to stdout
@@ -215,7 +222,8 @@
 
   /**
    * Function to run data creation example.
-   * @param spark spark session.
+   *
+   * @param spark      spark session.
    * @param peopleJson path to people json file.
    */
   private static void runDatasetCreationExample(final SparkSession spark, final String peopleJson) {
@@ -227,8 +235,8 @@
     // Encoders are created for Java beans
     Encoder<Person> personEncoder = Encoders.bean(Person.class);
     Dataset<Person> javaBeanDS = spark.createDataset(
-        Collections.singletonList(person),
-        personEncoder
+      Collections.singletonList(person),
+      personEncoder
     );
     javaBeanDS.show();
     // +---+----+
@@ -241,8 +249,8 @@
     Encoder<Integer> integerEncoder = Encoders.INT();
     Dataset<Integer> primitiveDS = spark.createDataset(Arrays.asList(1, 2, 3), integerEncoder);
     Dataset<Integer> transformedDS = primitiveDS.map(
-        (MapFunction<Integer, Integer>) value -> value + 1,
-        integerEncoder);
+      (MapFunction<Integer, Integer>) value -> value + 1,
+      integerEncoder);
     transformedDS.collect(); // Returns [2, 3, 4]
 
     // DataFrames can be converted to a Dataset by providing a class. Mapping based on name
@@ -260,21 +268,22 @@
 
   /**
    * Function to run infer schema example.
-   * @param spark spark session.
+   *
+   * @param spark     spark session.
    * @param peopleTxt path to people txt file.
    */
   private static void runInferSchemaExample(final SparkSession spark, final String peopleTxt) {
     // Create an RDD of Person objects from a text file
     JavaRDD<Person> peopleRDD = spark.read()
-        .textFile(peopleTxt)
-        .javaRDD()
-        .map(line -> {
-          String[] parts = line.split(",");
-          Person person = new Person();
-          person.setName(parts[0]);
-          person.setAge(Integer.parseInt(parts[1].trim()));
-          return person;
-        });
+      .textFile(peopleTxt)
+      .javaRDD()
+      .map(line -> {
+        String[] parts = line.split(",");
+        Person person = new Person();
+        person.setName(parts[0]);
+        person.setAge(Integer.parseInt(parts[1].trim()));
+        return person;
+      });
 
     // Apply a schema to an RDD of JavaBeans to get a DataFrame
     Dataset<Row> peopleDF = spark.createDataFrame(peopleRDD, Person.class);
@@ -287,8 +296,8 @@
     // The columns of a row in the result can be accessed by field index
     Encoder<String> stringEncoder = Encoders.STRING();
     Dataset<String> teenagerNamesByIndexDF = teenagersDF.map(
-        (MapFunction<Row, String>) row -> "Name: " + row.getString(0),
-        stringEncoder);
+      (MapFunction<Row, String>) row -> "Name: " + row.getString(0),
+      stringEncoder);
     teenagerNamesByIndexDF.show();
     // +------------+
     // |       value|
@@ -298,8 +307,8 @@
 
     // or by field name
     Dataset<String> teenagerNamesByFieldDF = teenagersDF.map(
-        (MapFunction<Row, String>) row -> "Name: " + row.<String>getAs("name"),
-        stringEncoder);
+      (MapFunction<Row, String>) row -> "Name: " + row.<String>getAs("name"),
+      stringEncoder);
     teenagerNamesByFieldDF.show();
     // +------------+
     // |       value|
@@ -310,14 +319,15 @@
 
   /**
    * Function to run programmatic schema example.
-   * @param spark spark session.
+   *
+   * @param spark     spark session.
    * @param peopleTxt path to people txt file.
    */
   private static void runProgrammaticSchemaExample(final SparkSession spark, final String peopleTxt) {
     // Create an RDD
     JavaRDD<String> peopleRDD = spark.read()
-        .textFile(peopleTxt)
-        .toJavaRDD();
+      .textFile(peopleTxt)
+      .toJavaRDD();
 
     // The schema is encoded in a string
     String schemaString = "name age";
@@ -348,8 +358,8 @@
     // The results of SQL queries are DataFrames and support all the normal RDD operations
     // The columns of a row in the result can be accessed by field index or by field name
     Dataset<String> namesDS = results.map(
-        (MapFunction<Row, String>) row -> "Name: " + row.getString(0),
-        Encoders.STRING());
+      (MapFunction<Row, String>) row -> "Name: " + row.getString(0),
+      Encoders.STRING());
     namesDS.show();
     // +-------------+
     // |        value|
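
> Note: this file also shows the import re-organization — the `java.*` imports that used to sit first (and out of alphabetical order) are now grouped after the `org.*` imports, alphabetized, with the lone static import kept last. A small compilable sketch of the target grouping (class name hypothetical):

```java
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * Demonstrates the re-organized import style: one alphabetized java.* group.
 */
public final class ImportOrderDemo implements Serializable {

  private static final List<Integer> NUMBERS =
    Collections.unmodifiableList(new ArrayList<>(Arrays.asList(1, 2, 3)));

  private ImportOrderDemo() {
  }

  public static void main(final String[] args) {
    System.out.println(NUMBERS); // prints [1, 2, 3]
  }
}
```
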
diff --git a/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaUserDefinedTypedAggregation.java b/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaUserDefinedTypedAggregation.java
index 7ca1554..6fed6da 100644
--- a/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaUserDefinedTypedAggregation.java
+++ b/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaUserDefinedTypedAggregation.java
@@ -29,7 +29,7 @@
 
 /**
  * Java SparkSQL example: User-defined Typed Aggregation.
- *
+ * <p>
  * This code has been copied from the Apache Spark (https://github.com/apache/spark) to demonstrate a spark example.
  */
 public final class JavaUserDefinedTypedAggregation {
@@ -49,6 +49,7 @@
 
     /**
      * Getter.
+     *
      * @return name.
      */
     public String getName() {
@@ -57,6 +58,7 @@
 
     /**
      * Setter.
+     *
      * @param name name.
      */
     public void setName(final String name) {
@@ -65,6 +67,7 @@
 
     /**
      * Getter.
+     *
      * @return salary.
      */
     public long getSalary() {
@@ -73,6 +76,7 @@
 
     /**
      * Setter.
+     *
      * @param salary salary.
      */
     public void setSalary(final long salary) {
@@ -83,7 +87,7 @@
   /**
    * Average class.
    */
-  public static final class Average implements Serializable  {
+  public static final class Average implements Serializable {
     private long sum;
     private long count;
 
@@ -96,7 +100,8 @@
 
     /**
      * Public constructor.
-     * @param sum sum.
+     *
+     * @param sum   sum.
      * @param count count.
      */
     public Average(final long sum, final long count) {
@@ -106,6 +111,7 @@
 
     /**
      * Getter.
+     *
      * @return sum.
      */
     public long getSum() {
@@ -114,6 +120,7 @@
 
     /**
      * Setter.
+     *
      * @param sum sum.
      */
     public void setSum(final long sum) {
@@ -122,6 +129,7 @@
 
     /**
      * Getter.
+     *
      * @return count.
      */
     public long getCount() {
@@ -130,6 +138,7 @@
 
     /**
      * Setter.
+     *
      * @param count count.
      */
     public void setCount(final long count) {
@@ -156,7 +165,7 @@
      * Combine two values to produce a new value.
      * For performance, the function may modify `buffer` and return it instead of constructing a new object.
      *
-     * @param buffer first value.
+     * @param buffer   first value.
      * @param employee second value.
      * @return average.
      */
@@ -214,13 +223,14 @@
 
   /**
    * Main function.
+   *
    * @param args arguments.
    */
   public static void main(final String[] args) {
     SparkSession spark = SparkSession
-        .builder()
-        .appName("Java Spark SQL user-defined Datasets aggregation example")
-        .getOrCreate();
+      .builder()
+      .appName("Java Spark SQL user-defined Datasets aggregation example")
+      .getOrCreate();
 
     Encoder<Employee> employeeEncoder = Encoders.bean(Employee.class);
     String path = args[0];
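
> Context for the Average bean touched above: the typed aggregation folds inputs into a (sum, count) buffer and finishes with sum / count. A plain-Java sketch of that reduce/finish logic, with no Spark dependency assumed (the salary figures are made up):

```java
public final class AverageDemo {

  private AverageDemo() {
  }

  public static void main(final String[] args) {
    final long[] salaries = {3000L, 4500L, 3500L, 4000L};
    long sum = 0L;
    long count = 0L;
    // Reduce step: fold each input into the (sum, count) buffer,
    // mirroring Average#setSum / Average#setCount above.
    for (final long salary : salaries) {
      sum += salary;
      count++;
    }
    // Finish step: the aggregator's final value is sum / count.
    System.out.println("average salary: " + ((double) sum) / count);
  }
}
```
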
diff --git a/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaUserDefinedUntypedAggregation.java b/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaUserDefinedUntypedAggregation.java
index 17c99f9..13eae1f 100644
--- a/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaUserDefinedUntypedAggregation.java
+++ b/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaUserDefinedUntypedAggregation.java
@@ -33,7 +33,7 @@
 
 /**
  * Java SparkSQL example: User-defined Untyped Aggregation.
- *
+ * <p>
  * This code has been copied from the Apache Spark (https://github.com/apache/spark) to demonstrate a spark example.
  */
 public final class JavaUserDefinedUntypedAggregation {
@@ -119,7 +119,7 @@
      * Updates the given aggregation buffer `buffer` with new input data from `input`.
      *
      * @param buffer buffer to update.
-     * @param input input to update with.
+     * @param input  input to update with.
      */
     public void update(final MutableAggregationBuffer buffer, final Row input) {
       if (!input.isNullAt(0)) {
@@ -156,13 +156,14 @@
 
   /**
    * Main function.
+   *
    * @param args arguments.
    */
   public static void main(final String[] args) {
     SparkSession spark = SparkSession
-        .builder()
-        .appName("Java Spark SQL user-defined DataFrames aggregation example")
-        .getOrCreate();
+      .builder()
+      .appName("Java Spark SQL user-defined DataFrames aggregation example")
+      .getOrCreate();
 
     // Register the function to access it
     spark.udf().register("myAverage", new MyAverage());
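
> The Javadoc hunks in the last few files all apply two conventions: a blank `*` line between the summary and the tag section, and `@param` names padded so their descriptions start in the same column (e.g. `buffer`/`input` above). A hypothetical method in the target shape:

```java
public final class JavadocStyleDemo {

  private JavadocStyleDemo() {
  }

  /**
   * Adds a bonus to a base salary.
   *
   * @param base  base salary.
   * @param bonus bonus amount.
   * @return the total.
   */
  public static long total(final long base, final long bonus) {
    return base + bonus;
  }

  public static void main(final String[] args) {
    System.out.println(total(3000L, 500L)); // prints 3500
  }
}
```
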
diff --git a/examples/spark/src/test/java/org/apache/nemo/examples/spark/MRJava.java b/examples/spark/src/test/java/org/apache/nemo/examples/spark/MRJava.java
index 13412c7..bd732a2 100644
--- a/examples/spark/src/test/java/org/apache/nemo/examples/spark/MRJava.java
+++ b/examples/spark/src/test/java/org/apache/nemo/examples/spark/MRJava.java
@@ -43,7 +43,7 @@
   @Before
   public void setUp() {
     builder = new ArgBuilder()
-        .addResourceJson(executorResourceFileName);
+      .addResourceJson(executorResourceFileName);
   }
 
   @Test(timeout = ExampleTestArgs.TIMEOUT)
@@ -55,11 +55,11 @@
     final String outputFilePath = ExampleTestArgs.getFileBasePath() + outputFileName;
 
     JobLauncher.main(builder
-        .addJobId(JavaWordCount.class.getSimpleName() + "_test")
-        .addUserMain(JavaWordCount.class.getCanonicalName())
-        .addUserArgs(inputFilePath, outputFilePath)
-        .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
-        .build());
+      .addJobId(JavaWordCount.class.getSimpleName() + "_test")
+      .addUserMain(JavaWordCount.class.getCanonicalName())
+      .addUserArgs(inputFilePath, outputFilePath)
+      .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
+      .build());
 
     try {
       ExampleTestUtil.ensureOutputValidity(ExampleTestArgs.getFileBasePath(), outputFileName, expectedOutputFilename);
@@ -77,11 +77,11 @@
     final String outputFilePath = ExampleTestArgs.getFileBasePath() + outputFileName;
 
     JobLauncher.main(builder
-        .addJobId(JavaWordAndLineCount.class.getSimpleName() + "_test")
-        .addUserMain(JavaWordAndLineCount.class.getCanonicalName())
-        .addUserArgs(inputFilePath, outputFilePath)
-        .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
-        .build());
+      .addJobId(JavaWordAndLineCount.class.getSimpleName() + "_test")
+      .addUserMain(JavaWordAndLineCount.class.getCanonicalName())
+      .addUserArgs(inputFilePath, outputFilePath)
+      .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
+      .build());
 
     try {
       ExampleTestUtil.ensureOutputValidity(ExampleTestArgs.getFileBasePath(), outputFileName, expectedOutputFilename);
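
> For readers skimming the test hunks: every test here builds a `String[]` through a fluent ArgBuilder and hands it to `JobLauncher.main`. A stripped-down, dependency-free sketch of that builder shape — the class and the flag names below are illustrative only, not Nemo's actual ArgBuilder or option names:

```java
import java.util.ArrayList;
import java.util.List;

/** Minimal fluent builder mirroring the ArgBuilder usage in the tests above. */
public final class ArgBuilderDemo {

  private final List<String> args = new ArrayList<>();

  public ArgBuilderDemo addJobId(final String jobId) {
    args.add("-job_id");    // illustrative flag name
    args.add(jobId);
    return this;
  }

  public ArgBuilderDemo addUserMain(final String canonicalName) {
    args.add("-user_main"); // illustrative flag name
    args.add(canonicalName);
    return this;
  }

  public String[] build() {
    return args.toArray(new String[0]);
  }

  public static void main(final String[] args) {
    final String[] built = new ArgBuilderDemo()
      .addJobId("JavaWordCount_test")
      .addUserMain("org.apache.nemo.examples.spark.JavaWordCount")
      .build();
    System.out.println(String.join(" ", built));
  }
}
```
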
diff --git a/examples/spark/src/test/java/org/apache/nemo/examples/spark/SparkJava.java b/examples/spark/src/test/java/org/apache/nemo/examples/spark/SparkJava.java
index 1ebbefc..078bd02 100644
--- a/examples/spark/src/test/java/org/apache/nemo/examples/spark/SparkJava.java
+++ b/examples/spark/src/test/java/org/apache/nemo/examples/spark/SparkJava.java
@@ -21,7 +21,6 @@
 import org.apache.nemo.client.JobLauncher;
 import org.apache.nemo.common.test.ArgBuilder;
 import org.apache.nemo.common.test.ExampleTestArgs;
-import org.apache.nemo.common.test.ExampleTestUtil;
 import org.apache.nemo.compiler.optimizer.policy.DefaultPolicy;
 import org.apache.nemo.examples.spark.sql.JavaUserDefinedTypedAggregation;
 import org.apache.nemo.examples.spark.sql.JavaUserDefinedUntypedAggregation;
@@ -45,7 +44,7 @@
   @Before
   public void setUp() {
     builder = new ArgBuilder()
-        .addResourceJson(executorResourceFileName);
+      .addResourceJson(executorResourceFileName);
   }
 
   @Test(timeout = ExampleTestArgs.TIMEOUT)
@@ -53,11 +52,11 @@
     final String numParallelism = "3";
 
     JobLauncher.main(builder
-        .addJobId(JavaSparkPi.class.getSimpleName() + "_test")
-        .addUserMain(JavaSparkPi.class.getCanonicalName())
-        .addUserArgs(numParallelism)
-        .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
-        .build());
+      .addJobId(JavaSparkPi.class.getSimpleName() + "_test")
+      .addUserMain(JavaSparkPi.class.getCanonicalName())
+      .addUserArgs(numParallelism)
+      .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
+      .build());
   }
 
   @Test(timeout = ExampleTestArgs.TIMEOUT)
@@ -66,11 +65,11 @@
     final String inputFilePath = ExampleTestArgs.getFileBasePath() + inputFileName;
 
     JobLauncher.main(builder
-        .addJobId(JavaUserDefinedTypedAggregation.class.getSimpleName() + "_test")
-        .addUserMain(JavaUserDefinedTypedAggregation.class.getCanonicalName())
-        .addUserArgs(inputFilePath)
-        .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
-        .build());
+      .addJobId(JavaUserDefinedTypedAggregation.class.getSimpleName() + "_test")
+      .addUserMain(JavaUserDefinedTypedAggregation.class.getCanonicalName())
+      .addUserArgs(inputFilePath)
+      .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
+      .build());
   }
 
   @Test(timeout = ExampleTestArgs.TIMEOUT)
@@ -79,11 +78,11 @@
     final String inputFilePath = ExampleTestArgs.getFileBasePath() + inputFileName;
 
     JobLauncher.main(builder
-        .addJobId(JavaUserDefinedUntypedAggregation.class.getSimpleName() + "_test")
-        .addUserMain(JavaUserDefinedUntypedAggregation.class.getCanonicalName())
-        .addUserArgs(inputFilePath)
-        .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
-        .build());
+      .addJobId(JavaUserDefinedUntypedAggregation.class.getSimpleName() + "_test")
+      .addUserMain(JavaUserDefinedUntypedAggregation.class.getCanonicalName())
+      .addUserArgs(inputFilePath)
+      .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
+      .build());
   }
 
   @Test(timeout = ExampleTestArgs.TIMEOUT)
diff --git a/examples/spark/src/test/java/org/apache/nemo/examples/spark/SparkScala.java b/examples/spark/src/test/java/org/apache/nemo/examples/spark/SparkScala.java
index 3f7314e..f0731c3 100644
--- a/examples/spark/src/test/java/org/apache/nemo/examples/spark/SparkScala.java
+++ b/examples/spark/src/test/java/org/apache/nemo/examples/spark/SparkScala.java
@@ -43,7 +43,7 @@
   @Before
   public void setUp() {
     builder = new ArgBuilder()
-        .addResourceJson(executorResourceFileName);
+      .addResourceJson(executorResourceFileName);
   }
 
   @Test(timeout = ExampleTestArgs.TIMEOUT)
@@ -51,11 +51,11 @@
     final String numParallelism = "3";
 
     JobLauncher.main(builder
-        .addJobId(SparkPi.class.getSimpleName() + "_test")
-        .addUserMain(SparkPi.class.getCanonicalName())
-        .addUserArgs(numParallelism)
-        .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
-        .build());
+      .addJobId(SparkPi.class.getSimpleName() + "_test")
+      .addUserMain(SparkPi.class.getCanonicalName())
+      .addUserArgs(numParallelism)
+      .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
+      .build());
   }
 
   @Test(timeout = ExampleTestArgs.TIMEOUT)
@@ -67,11 +67,11 @@
     final String outputFilePath = ExampleTestArgs.getFileBasePath() + outputFileName;
 
     JobLauncher.main(builder
-        .addJobId(SparkWordCount.class.getSimpleName() + "_test")
-        .addUserMain(SparkWordCount.class.getCanonicalName())
-        .addUserArgs(inputFilePath, outputFilePath)
-        .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
-        .build());
+      .addJobId(SparkWordCount.class.getSimpleName() + "_test")
+      .addUserMain(SparkWordCount.class.getCanonicalName())
+      .addUserArgs(inputFilePath, outputFilePath)
+      .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
+      .build());
 
     try {
       ExampleTestUtil.ensureOutputValidity(ExampleTestArgs.getFileBasePath(), outputFileName, expectedOutputFilename);
@@ -92,11 +92,11 @@
     final String outputFilePath2 = ExampleTestArgs.getFileBasePath() + outputFileName2;
 
     JobLauncher.main(builder
-        .addJobId(SparkCachingWordCount.class.getSimpleName() + "_test")
-        .addUserMain(SparkCachingWordCount.class.getCanonicalName())
-        .addUserArgs(inputFilePath, outputFilePath1, outputFilePath2)
-        .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
-        .build());
+      .addJobId(SparkCachingWordCount.class.getSimpleName() + "_test")
+      .addUserMain(SparkCachingWordCount.class.getCanonicalName())
+      .addUserArgs(inputFilePath, outputFilePath1, outputFilePath2)
+      .addOptimizationPolicy(DefaultPolicy.class.getCanonicalName())
+      .build());
 
     try {
       ExampleTestUtil.ensureOutputValidity(ExampleTestArgs.getFileBasePath(), outputFileName1, expectedOutputFilename1);
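
> The formatter.xml diff below pins the profile behind all of the reformatting above; the settings to note are `indentation.size=2`, `continuation_indentation=2`, `tabulation.char=space`, and `lineSplit=120`, all visible in the re-indented profile. A rough illustration of code conforming to those knobs (class and method hypothetical):

```java
/** Illustrates the profile's 2-space indent and 2-space continuation indent. */
public final class FormatterProfileDemo {

  private FormatterProfileDemo() {
  }

  // Bodies indent by 2 (indentation.size=2, tabulation.char=space); wrapped
  // argument lists continue at +2 (continuation_indentation=2). Lines may run
  // up to 120 columns (lineSplit=120) before the formatter wraps them.
  static String describe(final String planId, final String stageId, final String state) {
    return String.format("plan=%s stage=%s state=%s",
      planId, stageId, state);
  }

  public static void main(final String[] args) {
    System.out.println(describe("Plan-0", "Stage-1", "COMPLETE"));
  }
}
```
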
diff --git a/formatter.xml b/formatter.xml
index 598fc3d..857b87a 100644
--- a/formatter.xml
+++ b/formatter.xml
@@ -18,319 +18,398 @@
 under the License.
 -->
 <profiles version="14">
-<profile kind="CodeFormatterProfile" name="nemo" version="14">
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_for_statment" value="common_lines"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_invocation" value="common_lines"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_switch_statement" value="common_lines"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="2"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_enum_constant_declaration" value="common_lines"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
-<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="2"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
-<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_if_while_statement" value="common_lines"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.compiler.release" value="enabled"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.count_line_length_from_starting_position" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameterized_type_references" value="0"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_enum_constant" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
-<setting id="org.eclipse.jdt.core.compiler.problem.enumIdentifier" value="error"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_descriptions_grouped" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="120"/>
-<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_catch_clause" value="common_lines"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
-<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
-<setting id="org.eclipse.jdt.core.formatter.brace_position_for_lambda_body" value="end_of_line"/>
-<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_parameters" value="0"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
-<setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier" value="error"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
-<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_annotation" value="common_lines"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_module_statements" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_names_descriptions" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
-<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
-<setting id="org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode" value="enabled"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
-<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.wrap_before_conditional_operator" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.align_fields_grouping_blank_lines" value="2147483647"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
-<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_try_clause" value="common_lines"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
-<setting id="org.eclipse.jdt.core.compiler.source" value="10"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="2"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.wrap_before_assignment_operator" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="10"/>
-<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_type_annotation" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_delcaration" value="common_lines"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_arguments" value="0"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_lambda_arrow" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
-<setting id="org.eclipse.jdt.core.formatter.wrap_before_binary_operator" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.compiler.compliance" value="10"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_lambda_declaration" value="common_lines"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
-<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
-<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_for_loop_header" value="0"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_lambda_arrow" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
-<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="120"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
-</profile>
+  <profile kind="CodeFormatterProfile" name="nemo" version="14">
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_for_statment" value="common_lines"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters"
+             value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_invocation" value="common_lines"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_switch_statement" value="common_lines"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.indentation.size" value="2"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_enum_constant_declaration"
+             value="common_lines"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
+    <setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="2"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
+    <setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations"
+             value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_if_while_statement" value="common_lines"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
+    <setting
+      id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference"
+      value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.compiler.release" value="enabled"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.count_line_length_from_starting_position" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments"
+             value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_parameterized_type_references" value="0"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration"
+             value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_enum_constant" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
+    <setting id="org.eclipse.jdt.core.compiler.problem.enumIdentifier" value="error"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.align_tags_descriptions_grouped" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="120"/>
+    <setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments"
+             value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_catch_clause" value="common_lines"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference"
+             value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations"
+             value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header"
+             value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
+    <setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
+    <setting id="org.eclipse.jdt.core.formatter.brace_position_for_lambda_body" value="end_of_line"/>
+    <setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_type_parameters" value="0"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments"
+             value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
+    <setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier" value="error"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
+    <setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_annotation" value="common_lines"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_module_statements" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.align_tags_names_descriptions" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
+    <setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
+    <setting id="org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode" value="enabled"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
+    <setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.wrap_before_conditional_operator" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.align_fields_grouping_blank_lines" value="2147483647"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
+    <setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_try_clause" value="common_lines"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
+    <setting id="org.eclipse.jdt.core.compiler.source" value="10"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws"
+             value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="2"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.wrap_before_assignment_operator" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="10"/>
+    <setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters"
+             value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_type_annotation" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters"
+             value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_delcaration" value="common_lines"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_type_arguments" value="0"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header"
+             value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
+    <setting
+      id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference"
+      value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference"
+             value="do not insert"/>
+    <setting
+      id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference"
+      value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_lambda_arrow" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
+    <setting id="org.eclipse.jdt.core.formatter.wrap_before_binary_operator" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration"
+             value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.compiler.compliance" value="10"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_lambda_declaration" value="common_lines"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration"
+             value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
+    <setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
+    <setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_for_loop_header" value="0"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_lambda_arrow" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration"
+             value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="false"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations"
+             value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
+    <setting id="org.eclipse.jdt.core.formatter.lineSplit" value="120"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
+    <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
+  </profile>
 </profiles>
diff --git a/pom.xml b/pom.xml
index 3e05b66..539db2c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -17,319 +17,320 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-      <groupId>org.apache</groupId>
-      <artifactId>apache</artifactId>
-      <version>21</version>
-    </parent>
+  <parent>
+    <groupId>org.apache</groupId>
+    <artifactId>apache</artifactId>
+    <version>21</version>
+  </parent>
 
-    <groupId>org.apache.nemo</groupId>
-    <artifactId>nemo-project</artifactId>
-    <version>0.2-SNAPSHOT</version>
-    <packaging>pom</packaging>
-    <name>Nemo Project</name>
+  <groupId>org.apache.nemo</groupId>
+  <artifactId>nemo-project</artifactId>
+  <version>0.2-SNAPSHOT</version>
+  <packaging>pom</packaging>
+  <name>Nemo Project</name>
 
-    <scm>
-        <connection>scm:git:https://gitbox.apache.org/repos/asf/incubator-nemo.git</connection>
-        <developerConnection>scm:git:https://gitbox.apache.org/repos/asf/incubator-nemo.git</developerConnection>
-        <url>https://github.com/apache/incubator-nemo/tree/${project.scm.tag}</url>
-        <tag>master</tag>
-    </scm>
+  <scm>
+    <connection>scm:git:https://gitbox.apache.org/repos/asf/incubator-nemo.git</connection>
+    <developerConnection>scm:git:https://gitbox.apache.org/repos/asf/incubator-nemo.git</developerConnection>
+    <url>https://github.com/apache/incubator-nemo/tree/${project.scm.tag}</url>
+    <tag>master</tag>
+  </scm>
 
-    <properties>
-        <maven.compiler.source>1.8</maven.compiler.source>
-        <maven.compiler.target>1.8</maven.compiler.target>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <auto-service.version>1.0-rc2</auto-service.version>
-        <beam.version>2.11.0</beam.version>
-        <spark.version>2.2.0</spark.version>
-        <scala.version>2.11.8</scala.version>
-        <kryo.version>4.0.1</kryo.version>
-        <reef.version>0.16.0</reef.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <hadoop.version>2.7.2</hadoop.version>
-        <log4j.configuration>file://log4j.properties</log4j.configuration>
-        <netty.version>4.1.16.Final</netty.version>
-        <guava.version>19.0</guava.version>
-        <grpc.version>1.7.0</grpc.version>
-        <jackson.version>2.8.8</jackson.version>
-        <netlib.version>1.1.2</netlib.version>
-        <netty.version>4.1.16.Final</netty.version>
-        <jetty-server.version>9.4.10.v20180503</jetty-server.version>
-        <jetty-servlet.version>9.4.10.v20180503</jetty-servlet.version>
-        <commons-math.version>3.6.1</commons-math.version>
-        <sqlite-jdbc.version>3.25.2</sqlite-jdbc.version>
-        <postgresql.version>42.2.5</postgresql.version>
-        <slf4j.version>1.7.20</slf4j.version>
-        <!-- Tests -->
-        <mockito.version>2.13.0</mockito.version>
-        <powermock.version>2.0.0-beta.5</powermock.version>
-        <surefire.version>3.0.0-M1</surefire.version>
-        <junit.version>4.12</junit.version>
-    </properties>
+  <properties>
+    <maven.compiler.source>1.8</maven.compiler.source>
+    <maven.compiler.target>1.8</maven.compiler.target>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <auto-service.version>1.0-rc2</auto-service.version>
+    <beam.version>2.11.0</beam.version>
+    <spark.version>2.2.0</spark.version>
+    <scala.version>2.11.8</scala.version>
+    <kryo.version>4.0.1</kryo.version>
+    <reef.version>0.16.0</reef.version>
+    <protobuf.version>2.5.0</protobuf.version>
+    <hadoop.version>2.7.2</hadoop.version>
+    <log4j.configuration>file://log4j.properties</log4j.configuration>
+    <netty.version>4.1.16.Final</netty.version>
+    <guava.version>19.0</guava.version>
+    <grpc.version>1.7.0</grpc.version>
+    <jackson.version>2.8.8</jackson.version>
+    <netlib.version>1.1.2</netlib.version>
+    <netty.version>4.1.16.Final</netty.version>
+    <jetty-server.version>9.4.10.v20180503</jetty-server.version>
+    <jetty-servlet.version>9.4.10.v20180503</jetty-servlet.version>
+    <commons-math.version>3.6.1</commons-math.version>
+    <sqlite-jdbc.version>3.25.2</sqlite-jdbc.version>
+    <postgresql.version>42.2.5</postgresql.version>
+    <slf4j.version>1.7.20</slf4j.version>
+    <!-- Tests -->
+    <mockito.version>2.13.0</mockito.version>
+    <powermock.version>2.0.0-beta.5</powermock.version>
+    <surefire.version>3.0.0-M1</surefire.version>
+    <junit.version>4.12</junit.version>
+  </properties>
 
-    <modules>
-        <module>conf</module>
-        <module>client</module>
-        <module>common</module>
-        <module>compiler</module>
-        <module>examples</module>
-        <module>runtime</module>
-    </modules>
+  <modules>
+    <module>conf</module>
+    <module>client</module>
+    <module>common</module>
+    <module>compiler</module>
+    <module>examples</module>
+    <module>runtime</module>
+  </modules>
 
+  <dependencies>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+      <version>${protobuf.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.xerial.snappy</groupId>
+      <artifactId>snappy-java</artifactId>
+      <version>1.1.1.3</version>
+    </dependency>
+    <!--tests-->
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <version>${mockito.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-module-junit4</artifactId>
+      <version>${powermock.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-api-mockito2</artifactId>
+      <version>${powermock.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <dependencyManagement>
     <dependencies>
-        <dependency>
-            <groupId>com.google.protobuf</groupId>
-            <artifactId>protobuf-java</artifactId>
-            <version>${protobuf.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.xerial.snappy</groupId>
-            <artifactId>snappy-java</artifactId>
-            <version>1.1.1.3</version>
-        </dependency>
-        <!--tests-->
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <version>${junit.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.mockito</groupId>
-            <artifactId>mockito-core</artifactId>
-            <version>${mockito.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.powermock</groupId>
-            <artifactId>powermock-module-junit4</artifactId>
-            <version>${powermock.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.powermock</groupId>
-            <artifactId>powermock-api-mockito2</artifactId>
-            <version>${powermock.version}</version>
-            <scope>test</scope>
-        </dependency>
+      <!-- Follow the netty version of grpc, which is newer than the netty version of reef-wake -->
+      <dependency>
+        <groupId>io.netty</groupId>
+        <artifactId>netty-all</artifactId>
+        <version>${netty.version}</version>
+      </dependency>
+      <!-- Follow the guava version of grpc -->
+      <dependency>
+        <groupId>com.google.guava</groupId>
+        <artifactId>guava</artifactId>
+        <version>${guava.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>slf4j-api</artifactId>
+        <version>${slf4j.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>slf4j-log4j12</artifactId>
+        <version>${slf4j.version}</version>
+      </dependency>
+      <dependency>
+        <!--
+        This is needed to view the logs when running unit tests.
+        See https://dzone.com/articles/how-configure-slf4j-different for details.
+        -->
+        <groupId>org.slf4j</groupId>
+        <artifactId>slf4j-simple</artifactId>
+        <version>1.6.2</version>
+        <scope>test</scope>
+      </dependency>
     </dependencies>
+  </dependencyManagement>
 
-    <dependencyManagement>
-        <dependencies>
-            <!-- Follow netty version of grpc which is more later than the netty version of reef-wake -->
-            <dependency>
-                <groupId>io.netty</groupId>
-                <artifactId>netty-all</artifactId>
-                <version>${netty.version}</version>
-            </dependency>
-            <!-- Follow guava version of grpc -->
-            <dependency>
-                <groupId>com.google.guava</groupId>
-                <artifactId>guava</artifactId>
-                <version>${guava.version}</version>
-            </dependency>
-            <dependency>
-                <groupId>org.slf4j</groupId>
-                <artifactId>slf4j-api</artifactId>
-                <version>${slf4j.version}</version>
-            </dependency>
-            <dependency>
-                <groupId>org.slf4j</groupId>
-                <artifactId>slf4j-log4j12</artifactId>
-                <version>${slf4j.version}</version>
-            </dependency>
-            <dependency>
-                <!--
-                This is needed to view the logs when running unit tests.
-                See https://dzone.com/articles/how-configure-slf4j-different for details.
-                -->
-                <groupId>org.slf4j</groupId>
-                <artifactId>slf4j-simple</artifactId>
-                <version>1.6.2</version>
-                <scope>test</scope>
-            </dependency>
-        </dependencies>
-    </dependencyManagement>
+  <build>
+    <extensions>
+      <extension>
+        <groupId>kr.motd.maven</groupId>
+        <artifactId>os-maven-plugin</artifactId>
+        <version>1.5.0.Final</version>
+      </extension>
+    </extensions>
 
-    <build>
-        <extensions>
-            <extension>
-                <groupId>kr.motd.maven</groupId>
-                <artifactId>os-maven-plugin</artifactId>
-                <version>1.5.0.Final</version>
-            </extension>
-        </extensions>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.codehaus.mojo</groupId>
+          <artifactId>sonar-maven-plugin</artifactId>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-surefire-plugin</artifactId>
+          <version>${surefire.version}</version>
+          <configuration>
+            <!-- Useful for debugging: See https://stackoverflow.com/a/16941432 -->
+            <trimStackTrace>false</trimStackTrace>
+          </configuration>
+        </plugin>
+      </plugins>
+    </pluginManagement>
 
-        <pluginManagement>
-            <plugins>
-                <plugin>
-                    <groupId>org.codehaus.mojo</groupId>
-                    <artifactId>sonar-maven-plugin</artifactId>
-                </plugin>
-                <plugin>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-surefire-plugin</artifactId>
-                    <version>${surefire.version}</version>
-                  <configuration>
-                    <!-- Useful for debugging: See https://stackoverflow.com/a/16941432 -->
-                    <trimStackTrace>false</trimStackTrace>
-                  </configuration>
-                </plugin>
-            </plugins>
-        </pluginManagement>
-
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-checkstyle-plugin</artifactId>
-                <version>2.17</version>
-                <executions>
-                    <execution>
-                        <id>validate</id>
-                        <phase>validate</phase>
-                        <configuration>
-                            <configLocation>checkstyle.xml</configLocation>
-                            <headerLocation>checkstyle.license</headerLocation>
-                            <encoding>UTF-8</encoding>
-                            <consoleOutput>true</consoleOutput>
-                            <failsOnError>true</failsOnError>
-                            <excludes>**/org/apache/reef/**/*</excludes>
-                        </configuration>
-                        <goals>
-                            <goal>check</goal>
-                            <goal>checkstyle</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-              <groupId>org.apache.maven.plugins</groupId>
-              <artifactId>maven-failsafe-plugin</artifactId>
-              <version>${surefire.version}</version>
-              <executions>
-                <execution>
-                  <goals>
-                    <goal>integration-test</goal>
-                    <goal>verify</goal>
-                  </goals>
-                </execution>
-              </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.codehaus.mojo</groupId>
-                <artifactId>build-helper-maven-plugin</artifactId>
-                <version>3.0.0</version>
-                <executions>
-                    <execution>
-                        <id>add-source</id>
-                        <phase>generate-sources</phase>
-                        <goals>
-                            <goal>add-source</goal>
-                        </goals>
-                        <configuration>
-                            <sources>
-                                <source>${project.basedir}/target/generated-sources/</source>
-                            </sources>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.xolstice.maven.plugins</groupId>
-                <artifactId>protobuf-maven-plugin</artifactId>
-                <version>0.5.0</version>
-                <configuration>
-                    <pluginId>grpc-java</pluginId>
-                    <protocArtifact>com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}</protocArtifact>
-                    <pluginArtifact>io.grpc:protoc-gen-grpc-java:${grpc.version}:exe:${os.detected.classifier}</pluginArtifact>
-                </configuration>
-                <executions>
-                    <execution>
-                        <goals>
-                            <goal>compile</goal>
-                            <goal>compile-custom</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-javadoc-plugin</artifactId>
-                <version>3.0.0</version>
-                <configuration>
-                  <excludePackageNames>*.org.apache.nemo.runtime.common.comm</excludePackageNames>
-                  <outputDirectory>docs/apidocs</outputDirectory>
-                  <reportOutputDirectory>docs/apidocs</reportOutputDirectory>
-                </configuration>
-                <executions>
-                    <execution>
-                      <id>aggregate</id>
-                      <goals>
-                          <goal>aggregate</goal>
-                      </goals>
-                      <phase>site</phase>
-                    </execution>
-                    <execution>
-                      <id>test-javadoc</id>
-                      <goals>
-                        <goal>javadoc</goal>
-                      </goals>
-                      <phase>validate</phase>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.rat</groupId>
-                <artifactId>apache-rat-plugin</artifactId>
-                <version>0.12</version>
-                <executions>
-                    <execution>
-                        <phase>verify</phase>
-                        <goals>
-                            <goal>check</goal>
-                        </goals>
-                    </execution>
-                </executions>
-                <configuration>
-                    <excludes>
-                        <!-- Markdown files such as README.md -->
-                        <exclude>**/*.md</exclude>
-                        <!-- Git files -->
-                        <exclude>.gitattributes</exclude>
-                        <exclude>.gitignore</exclude>
-                        <exclude>.git/**</exclude>
-                        <!-- Intellij idea project files -->
-                        <exclude>**/.idea/**</exclude>
-                        <exclude>**/*.iml</exclude>
-                        <!-- Maven build files -->
-                        <exclude>**/target/**</exclude>
-                        <!-- REEF run files -->
-                        <exclude>**/REEF_LOCAL_RUNTIME/**</exclude>
-                        <!-- Resources -->
-                        <exclude>**/resources/**</exclude>
-                        <!-- Logs -->
-                        <exclude>**/*.log</exclude>
-                        <!-- Web UI -->
-                        <exclude>webui/.nuxt/**</exclude>
-                        <exclude>webui/node_modules/**</exclude>
-                        <exclude>**/*.json</exclude>
-                        <exclude>**/.editorconfig</exclude>
-                        <exclude>**/config.gypi</exclude>
-                        <!-- EditorConfig -->
-                        <exclude>.editorconfig</exclude>
-                        <!-- formatter.xml -->
-                        <exclude>formatter.xml</exclude>
-                        <!-- javadoc -->
-                        <exclude>**/docs/**</exclude>
-                        <!-- dag files -->
-                        <exclude>**/dag/**</exclude>
-                    </excludes>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <version>2.17</version>
+        <executions>
+          <execution>
+            <id>validate</id>
+            <phase>validate</phase>
+            <configuration>
+              <configLocation>checkstyle.xml</configLocation>
+              <headerLocation>checkstyle.license</headerLocation>
+              <encoding>UTF-8</encoding>
+              <consoleOutput>true</consoleOutput>
+              <failsOnError>true</failsOnError>
+              <excludes>**/org/apache/reef/**/*</excludes>
+            </configuration>
+            <goals>
+              <goal>check</goal>
+              <goal>checkstyle</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-failsafe-plugin</artifactId>
+        <version>${surefire.version}</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>integration-test</goal>
+              <goal>verify</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <version>3.0.0</version>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>${project.basedir}/target/generated-sources/</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.xolstice.maven.plugins</groupId>
+        <artifactId>protobuf-maven-plugin</artifactId>
+        <version>0.5.0</version>
+        <configuration>
+          <pluginId>grpc-java</pluginId>
+          <protocArtifact>com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}</protocArtifact>
+          <pluginArtifact>io.grpc:protoc-gen-grpc-java:${grpc.version}:exe:${os.detected.classifier}</pluginArtifact>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>compile</goal>
+              <goal>compile-custom</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-javadoc-plugin</artifactId>
+        <version>3.0.0</version>
+        <configuration>
+          <excludePackageNames>*.org.apache.nemo.runtime.common.comm</excludePackageNames>
+          <outputDirectory>docs/apidocs</outputDirectory>
+          <reportOutputDirectory>docs/apidocs</reportOutputDirectory>
+        </configuration>
+        <executions>
+          <execution>
+            <id>aggregate</id>
+            <goals>
+              <goal>aggregate</goal>
+            </goals>
+            <phase>site</phase>
+          </execution>
+          <execution>
+            <id>test-javadoc</id>
+            <goals>
+              <goal>javadoc</goal>
+            </goals>
+            <phase>validate</phase>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <version>0.12</version>
+        <executions>
+          <execution>
+            <phase>verify</phase>
+            <goals>
+              <goal>check</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <excludes>
+            <!-- Markdown files such as README.md -->
+            <exclude>**/*.md</exclude>
+            <!-- Git files -->
+            <exclude>.gitattributes</exclude>
+            <exclude>.gitignore</exclude>
+            <exclude>.git/**</exclude>
+            <!-- Intellij idea project files -->
+            <exclude>**/.idea/**</exclude>
+            <exclude>**/*.iml</exclude>
+            <!-- Maven build files -->
+            <exclude>**/target/**</exclude>
+            <!-- REEF run files -->
+            <exclude>**/REEF_LOCAL_RUNTIME/**</exclude>
+            <!-- Resources -->
+            <exclude>**/resources/**</exclude>
+            <!-- Logs -->
+            <exclude>**/*.log</exclude>
+            <!-- Web UI -->
+            <exclude>webui/.nuxt/**</exclude>
+            <exclude>webui/node_modules/**</exclude>
+            <exclude>**/*.json</exclude>
+            <exclude>**/.editorconfig</exclude>
+            <exclude>**/config.gypi</exclude>
+            <!-- EditorConfig -->
+            <exclude>.editorconfig</exclude>
+            <!-- formatter.xml -->
+            <exclude>formatter.xml</exclude>
+            <!-- javadoc -->
+            <exclude>**/docs/**</exclude>
+            <!-- dag files -->
+            <exclude>**/dag/**</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
 </project>
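
For orientation, the re-indented build section above wires the whole verification chain into the default lifecycle: checkstyle runs at the `validate` phase with `failsOnError` enabled (REEF-derived sources excluded), failsafe picks up integration tests at `integration-test`/`verify`, the protobuf plugin generates both the protoc and grpc-java stubs from the versions pinned in `${protobuf.version}` and `${grpc.version}`, and apache-rat enforces license headers at `verify` using the exclusion list shown. A plain `mvn verify` therefore exercises every check in this block.
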
diff --git a/runtime/common/pom.xml b/runtime/common/pom.xml
index 5272292..6f8c68e 100644
--- a/runtime/common/pom.xml
+++ b/runtime/common/pom.xml
@@ -17,66 +17,67 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-runtime</artifactId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../</relativePath>
-    </parent>
+  <parent>
+    <groupId>org.apache.nemo</groupId>
+    <artifactId>nemo-runtime</artifactId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../</relativePath>
+  </parent>
 
-    <artifactId>nemo-runtime-common</artifactId>
-    <name>Nemo Runtime Common</name>
+  <artifactId>nemo-runtime-common</artifactId>
+  <name>Nemo Runtime Common</name>
 
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-conf</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.postgresql</groupId>
-            <artifactId>postgresql</artifactId>
-            <version>${postgresql.version}</version>
-        </dependency>
-        <!-- GRPC -->
-        <dependency>
-            <groupId>io.grpc</groupId>
-            <artifactId>grpc-netty</artifactId>
-            <version>${grpc.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>io.grpc</groupId>
-            <artifactId>grpc-protobuf</artifactId>
-            <version>${grpc.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>io.grpc</groupId>
-            <artifactId>grpc-stub</artifactId>
-            <version>${grpc.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>io.grpc</groupId>
-            <artifactId>grpc-testing</artifactId>
-            <version>${grpc.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-core</artifactId>
-            <version>${jackson.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-            <version>${jackson.version}</version>
-        </dependency>
-    </dependencies>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-conf</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.postgresql</groupId>
+      <artifactId>postgresql</artifactId>
+      <version>${postgresql.version}</version>
+    </dependency>
+    <!-- GRPC -->
+    <dependency>
+      <groupId>io.grpc</groupId>
+      <artifactId>grpc-netty</artifactId>
+      <version>${grpc.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.grpc</groupId>
+      <artifactId>grpc-protobuf</artifactId>
+      <version>${grpc.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.grpc</groupId>
+      <artifactId>grpc-stub</artifactId>
+      <version>${grpc.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.grpc</groupId>
+      <artifactId>grpc-testing</artifactId>
+      <version>${grpc.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-core</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+  </dependencies>
 </project>
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/NativeChannelImplementationSelector.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/NativeChannelImplementationSelector.java
index 5196947..73d3722 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/NativeChannelImplementationSelector.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/NativeChannelImplementationSelector.java
@@ -49,14 +49,14 @@
   // We may want to add selection of KQueue (for BSD). This requires a higher version of Netty.
 
   private static final BiFunction<Integer, ThreadFactory, EventLoopGroup> EVENT_LOOP_GROUP_FUNCTION =
-      Epoll.isAvailable() ? (numThreads, threadFactory) -> new EpollEventLoopGroup(numThreads, threadFactory)
-          : (numThreads, threadFactory) -> new NioEventLoopGroup(numThreads, threadFactory);
+    Epoll.isAvailable() ? (numThreads, threadFactory) -> new EpollEventLoopGroup(numThreads, threadFactory)
+      : (numThreads, threadFactory) -> new NioEventLoopGroup(numThreads, threadFactory);
   private static final Class<? extends ServerChannel> SERVER_CHANNEL_CLASS =
-      Epoll.isAvailable() ? EpollServerSocketChannel.class
-          : NioServerSocketChannel.class;
+    Epoll.isAvailable() ? EpollServerSocketChannel.class
+      : NioServerSocketChannel.class;
   private static final Class<? extends Channel> CHANNEL_CLASS =
-      Epoll.isAvailable() ? EpollSocketChannel.class
-          : NioSocketChannel.class;
+    Epoll.isAvailable() ? EpollSocketChannel.class
+      : NioSocketChannel.class;
 
   @Override
   public EventLoopGroup newEventLoopGroup(final int numThreads, final ThreadFactory threadFactory) {
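
The selection logic above follows a common Netty idiom: probe once for the native Linux transport, and fall back to portable NIO everywhere else. A minimal, self-contained sketch of the same pattern (assuming Netty 4.x on the classpath; class and variable names here are illustrative, not Nemo API):

```java
// Probe for the native epoll transport once; fall back to NIO if unavailable.
import io.netty.channel.EventLoopGroup;
import io.netty.channel.epoll.Epoll;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;

public final class TransportSelectionDemo {
  public static void main(final String[] args) {
    // Epoll.isAvailable() checks that the native library loaded successfully.
    final EventLoopGroup group = Epoll.isAvailable()
      ? new EpollEventLoopGroup(4)
      : new NioEventLoopGroup(4);
    System.out.println("Selected transport: " + group.getClass().getSimpleName());
    group.shutdownGracefully();
  }
}
```
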
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/NettyChannelImplementationSelector.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/NettyChannelImplementationSelector.java
index 2a0738c..2904448 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/NettyChannelImplementationSelector.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/NettyChannelImplementationSelector.java
@@ -33,6 +33,7 @@
 
   /**
    * Creates a new {@link EventLoopGroup}.
+   *
    * @param numThreads    the number of threads
    * @param threadFactory the {@link ThreadFactory}
    * @return a new {@link EventLoopGroup}
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/ReplyFutureMap.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/ReplyFutureMap.java
index 95c4446..fbe3e7a 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/ReplyFutureMap.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/ReplyFutureMap.java
@@ -23,6 +23,7 @@
 
 /**
  * Orchestrate message sender and receiver using {@link CompletableFuture} for asynchronous request-reply communication.
+ *
  * @param <T> the type of successful reply
  */
 public final class ReplyFutureMap<T> {
@@ -37,6 +38,7 @@
    * Called by message sender, just before a new request is sent.
    * Note that this method should be used *before* actual message sending.
    * Otherwise {@code onSuccessMessage} can be called before putting the new future into {@code requestIdToFuture}.
+   *
    * @param id the request id
    * @return a {@link CompletableFuture} for the reply
    */
@@ -48,7 +50,8 @@
 
   /**
    * Called by message receiver, for a successful reply message.
-   * @param id the request id
+   *
+   * @param id             the request id
    * @param successMessage the reply message
    */
   public void onSuccessMessage(final long id, final T successMessage) {
@@ -57,6 +60,7 @@
 
   /**
    * Called for a failure in request-reply communication.
+   *
    * @param id the request id
    * @param ex throwable exception
    */
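
The ordering constraint in the Javadoc above (register the future before sending) is the crux of this class. A self-contained sketch of the same pattern, with illustrative names rather than the class's actual API: the future is keyed by request id before the message goes out, so even a reply that arrives immediately finds a future to complete.

```java
// Sketch of the register-before-send request-reply pattern (illustrative names).
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;

final class ReplyFutureSketch<T> {
  private final Map<Long, CompletableFuture<T>> pending = new ConcurrentHashMap<>();

  CompletableFuture<T> beforeRequest(final long id) {
    final CompletableFuture<T> future = new CompletableFuture<>();
    pending.put(id, future);  // register first, then send the actual message
    return future;
  }

  void onReply(final long id, final T reply) {
    final CompletableFuture<T> future = pending.remove(id);
    if (future != null) {     // ignore replies to unknown requests
      future.complete(reply);
    }
  }
}
```
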
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/RuntimeTestUtil.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/RuntimeTestUtil.java
index 0bd523f..39f0f7c 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/RuntimeTestUtil.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/RuntimeTestUtil.java
@@ -20,7 +20,8 @@
 
 import org.apache.nemo.common.Pair;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 
@@ -37,12 +38,13 @@
 
   /**
    * Gets a list of integer pair elements in range.
+   *
    * @param start value of the range (inclusive).
    * @param end   value of the range (exclusive).
    * @return the list of elements.
    */
   public static List getRangedNumList(final int start,
-                                               final int end) {
+                                      final int end) {
     final List numList = new ArrayList<>(end - start);
     IntStream.range(start, end).forEach(number -> numList.add(Pair.of(number, number)));
     return numList;
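
For reference, the helper above builds pairs of identical integers over a half-open range; an illustrative call (not code from this PR) looks like this:

```java
// A half-open range [0, 3) yields the three pairs (0,0), (1,1), (2,2).
import java.util.List;
import org.apache.nemo.runtime.common.RuntimeTestUtil;

final class RangedNumListDemo {
  public static void main(final String[] args) {
    final List pairs = RuntimeTestUtil.getRangedNumList(0, 3);
    System.out.println(pairs.size());  // 3
  }
}
```
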
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/exception/AbsentBlockException.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/exception/AbsentBlockException.java
index 20f059d..0f56d41 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/exception/AbsentBlockException.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/exception/AbsentBlockException.java
@@ -29,7 +29,7 @@
 
   /**
    * @param blockId id of the block
-   * @param state  state of the block
+   * @param state   state of the block
    */
   public AbsentBlockException(final String blockId, final BlockState.State state) {
     this.blockId = blockId;
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageContext.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageContext.java
index 1888c0c..83b27299 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageContext.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageContext.java
@@ -27,7 +27,7 @@
    * Send back a reply message.
    *
    * @param replyMessage a reply message
-   * @param <U> type of the reply message
+   * @param <U>          type of the reply message
    */
   <U> void reply(U replyMessage);
 
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageEnvironment.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageEnvironment.java
index 1eb3141..7e4067b 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageEnvironment.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageEnvironment.java
@@ -42,8 +42,8 @@
    * Set up a {@link MessageListener} with a listener id.
    *
    * @param listenerId an identifier of the message listener
-   * @param listener a message listener
-   * @param <T> The type of the message to be sent in the environment
+   * @param listener   a message listener
+   * @param <T>        The type of the message to be sent in the environment
    */
   <T> void setupListener(String listenerId, MessageListener<T> listener);
 
@@ -60,13 +60,14 @@
    *
    * @param receiverId a receiver id
    * @param listenerId an identifier of the message listener
-   * @param <T> The type of the message to be sent in the environment
+   * @param <T>        The type of the message to be sent in the environment
    * @return a message sender
    */
   <T> Future<MessageSender<T>> asyncConnect(String receiverId, String listenerId);
 
   /**
    * Close this message environment.
+   *
    * @throws Exception while closing
    */
   void close() throws Exception;
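
The contract above pairs a listener registration with an id-based connection. A hypothetical wiring helper, using only the methods shown in this interface (the helper itself is invented for illustration):

```java
// Register a listener under an id, then connect to a peer under the same id.
import java.util.concurrent.Future;

import org.apache.nemo.runtime.common.message.MessageEnvironment;
import org.apache.nemo.runtime.common.message.MessageListener;
import org.apache.nemo.runtime.common.message.MessageSender;

final class EnvironmentWiringSketch {
  static <T> MessageSender<T> listenAndConnect(final MessageEnvironment env,
                                               final String receiverId,
                                               final String listenerId,
                                               final MessageListener<T> listener)
      throws Exception {
    env.setupListener(listenerId, listener);  // handle messages addressed to us
    final Future<MessageSender<T>> pending = env.asyncConnect(receiverId, listenerId);
    return pending.get();                     // block until the connection is up
  }
}
```
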
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageListener.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageListener.java
index 1325290..516d556 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageListener.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageListener.java
@@ -29,13 +29,15 @@
 
   /**
    * Called back when a message is received.
+   *
    * @param message a message
    */
   void onMessage(T message);
 
   /**
    * Called back when a message is received, and return a response using {@link MessageContext}.
-   * @param message a message
+   *
+   * @param message        a message
    * @param messageContext a message context
    */
   void onMessageWithContext(T message, MessageContext messageContext);
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageSender.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageSender.java
index 6d14557..de1f883 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageSender.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/MessageSender.java
@@ -22,6 +22,7 @@
 
 /**
 * Sends messages to a {@link MessageListener} with some defined semantics.
+ *
  * @param <T> message type
  */
 public interface MessageSender<T> {
@@ -39,13 +40,14 @@
    * a reply message. If there was an exception, the returned future would be failed.
    *
    * @param message a message
-   * @param <U> reply message type.
+   * @param <U>     reply message type.
    * @return a future
    */
   <U> CompletableFuture<U> request(T message);
 
   /**
    * Closes the connection.
+   *
    * @throws Exception while closing.
    */
   void close() throws Exception;
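
A hypothetical caller showing the two messaging styles this interface offers: fire-and-forget `send()`, and `request()` bridged to a blocking wait on the returned future. Only methods visible in the interface are used; the wrapper itself is invented.

```java
// Fire-and-forget vs. request-reply over a MessageSender.
import java.util.concurrent.CompletableFuture;

import org.apache.nemo.runtime.common.message.MessageSender;

final class RequestReplySketch {
  static <T, U> U requestAndAwait(final MessageSender<T> sender, final T message)
      throws Exception {
    final CompletableFuture<U> reply = sender.request(message);  // async round trip
    return reply.get();                                          // wait for the reply
  }

  static <T> void fireAndForget(final MessageSender<T> sender, final T message) {
    sender.send(message);  // no reply expected
  }
}
```
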
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/PersistentConnectionToMasterMap.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/PersistentConnectionToMasterMap.java
index 788c08d..ce1fd57 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/PersistentConnectionToMasterMap.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/PersistentConnectionToMasterMap.java
@@ -18,8 +18,8 @@
  */
 package org.apache.nemo.runtime.common.message;
 
-import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.common.exception.NodeConnectionException;
+import org.apache.nemo.runtime.common.comm.ControlMessage;
 
 import javax.inject.Inject;
 import java.util.HashMap;
@@ -40,11 +40,11 @@
     try {
       // Connect the globally known message listener IDs.
       messageSenders.put(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID,
-          messageEnvironment.<ControlMessage.Message>asyncConnect(MessageEnvironment.MASTER_COMMUNICATION_ID,
-              MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID).get());
+        messageEnvironment.<ControlMessage.Message>asyncConnect(MessageEnvironment.MASTER_COMMUNICATION_ID,
+          MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID).get());
       messageSenders.put(MessageEnvironment.BLOCK_MANAGER_MASTER_MESSAGE_LISTENER_ID,
-          messageEnvironment.<ControlMessage.Message>asyncConnect(MessageEnvironment.MASTER_COMMUNICATION_ID,
-              MessageEnvironment.BLOCK_MANAGER_MASTER_MESSAGE_LISTENER_ID).get());
+        messageEnvironment.<ControlMessage.Message>asyncConnect(MessageEnvironment.MASTER_COMMUNICATION_ID,
+          MessageEnvironment.BLOCK_MANAGER_MASTER_MESSAGE_LISTENER_ID).get());
     } catch (InterruptedException | ExecutionException e) {
       Thread.currentThread().interrupt();
       throw new NodeConnectionException(e);
@@ -65,7 +65,7 @@
       final MessageSender<ControlMessage.Message> createdMessageSender;
       try {
         createdMessageSender = messageEnvironment.<ControlMessage.Message>asyncConnect(
-            MessageEnvironment.MASTER_COMMUNICATION_ID, listenerId).get();
+          MessageEnvironment.MASTER_COMMUNICATION_ID, listenerId).get();
         messageSenders.put(listenerId, createdMessageSender);
       } catch (InterruptedException | ExecutionException e) {
         Thread.currentThread().interrupt();
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageClient.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageClient.java
index 7994993..69f5e19 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageClient.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageClient.java
@@ -18,12 +18,12 @@
  */
 package org.apache.nemo.runtime.common.message.grpc;
 
-import org.apache.nemo.runtime.common.comm.ControlMessage;
-import org.apache.nemo.runtime.common.comm.MessageServiceGrpc;
 import io.grpc.ManagedChannel;
 import io.grpc.ManagedChannelBuilder;
 import io.grpc.StatusRuntimeException;
 import io.grpc.stub.StreamObserver;
+import org.apache.nemo.runtime.common.comm.ControlMessage;
+import org.apache.nemo.runtime.common.comm.MessageServiceGrpc;
 import org.apache.reef.io.network.naming.NameResolver;
 import org.apache.reef.wake.Identifier;
 import org.apache.reef.wake.IdentifierFactory;
@@ -55,9 +55,10 @@
 
   /**
    * Constructor.
+   *
    * @param nameResolver name resolver.
-   * @param idFactory identifier factory.
-   * @param receiverId id of the receiver.
+   * @param idFactory    identifier factory.
+   * @param receiverId   id of the receiver.
    */
   GrpcMessageClient(final NameResolver nameResolver,
                     final IdentifierFactory idFactory,
@@ -83,15 +84,16 @@
 
   /**
    * Method for setting up a channel.
+   *
    * @param ipAddress ipAddress of the socket.
    * @throws Exception exception while setting up.
    */
   private void setupChannel(final InetSocketAddress ipAddress) throws Exception {
-      this.managedChannel = ManagedChannelBuilder.forAddress(ipAddress.getHostName(), ipAddress.getPort())
-          .usePlaintext(true)
-          .build();
-      this.blockingStub = MessageServiceGrpc.newBlockingStub(managedChannel);
-      this.asyncStub = MessageServiceGrpc.newStub(managedChannel);
+    this.managedChannel = ManagedChannelBuilder.forAddress(ipAddress.getHostName(), ipAddress.getPort())
+      .usePlaintext(true)
+      .build();
+    this.blockingStub = MessageServiceGrpc.newBlockingStub(managedChannel);
+    this.asyncStub = MessageServiceGrpc.newStub(managedChannel);
   }
 
   /**
@@ -101,12 +103,12 @@
    */
   void send(final ControlMessage.Message message) {
     LOG.debug("[SEND] request msg.id={}, msg.listenerId={}, msg.type={}",
-        message.getId(), message.getListenerId(), message.getType());
+      message.getId(), message.getListenerId(), message.getType());
     try {
       blockingStub.send(message);
     } catch (final StatusRuntimeException e) {
       LOG.warn("RPC send call failed with msg.id={}, msg.listenerId={}, msg.type={}, e.cause={}, e.message={}",
-          message.getId(), message.getListenerId(), message.getType(), e.getCause(), e.getMessage());
+        message.getId(), message.getListenerId(), message.getType(), e.getCause(), e.getMessage());
     }
   }
 
@@ -118,28 +120,28 @@
    */
   CompletableFuture<ControlMessage.Message> request(final ControlMessage.Message message) {
     LOG.debug("[REQUEST] request msg.id={}, msg.listenerId={}, msg.type={}",
-        message.getId(), message.getListenerId(), message.getType());
+      message.getId(), message.getListenerId(), message.getType());
 
     final CompletableFuture<ControlMessage.Message> completableFuture = new CompletableFuture<>();
     asyncStub.request(message, new StreamObserver<ControlMessage.Message>() {
       @Override
       public void onNext(final ControlMessage.Message responseMessage) {
         LOG.debug("[REQUEST] response msg.id={}, msg.listenerId={}, msg.type={}",
-            responseMessage.getId(), responseMessage.getListenerId(), responseMessage.getType());
+          responseMessage.getId(), responseMessage.getListenerId(), responseMessage.getType());
         completableFuture.complete(responseMessage);
       }
 
       @Override
       public void onError(final Throwable e) {
         LOG.warn("RPC request call failed with msg.id={}, msg.listenerId={}, msg.type={}, e.cause={}, e.message={}",
-            message.getId(), message.getListenerId(), message.getType(), e.getCause(), e.getMessage());
+          message.getId(), message.getListenerId(), message.getType(), e.getCause(), e.getMessage());
         completableFuture.completeExceptionally(e);
       }
 
       @Override
       public void onCompleted() {
         LOG.debug("[REQUEST] completed. msg.id={}, msg.listenerId={}, msg.type={}",
-            message.getId(), message.getListenerId(), message.getType());
+          message.getId(), message.getListenerId(), message.getType());
       }
     });
 
@@ -148,6 +150,7 @@
 
   /**
    * Closes the channel.
+   *
    * @throws Exception exception while closing.
    */
   void close() throws Exception {
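
One detail worth noting in the client above: the one-way `send()` deliberately goes through the blocking stub and merely logs a `StatusRuntimeException` on failure, while `request()` uses the async stub and bridges gRPC's `StreamObserver` callbacks (`onNext`/`onError`) into a `CompletableFuture`, so callers get an ordinary future-based API for request-reply calls.
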
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageContext.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageContext.java
index f3bf328..30979bb 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageContext.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageContext.java
@@ -18,9 +18,9 @@
  */
 package org.apache.nemo.runtime.common.message.grpc;
 
+import io.grpc.stub.StreamObserver;
 import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.runtime.common.message.MessageContext;
-import io.grpc.stub.StreamObserver;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -35,6 +35,7 @@
 
   /**
    * Constructor.
+   *
    * @param responseObserver observer for the message.
    */
   GrpcMessageContext(final StreamObserver<ControlMessage.Message> responseObserver) {
@@ -45,7 +46,7 @@
   public <U> void reply(final U replyMessage) {
     final ControlMessage.Message message = (ControlMessage.Message) replyMessage;
     LOG.debug("[REQUEST] response msg.id={}, msg.listenerId={}, msg.type={}",
-        message.getId(), message.getListenerId(), message.getType());
+      message.getId(), message.getListenerId(), message.getType());
     responseObserver.onNext((ControlMessage.Message) replyMessage);
     responseObserver.onCompleted();
   }
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageEnvironment.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageEnvironment.java
index e7a9590..aedf5aa 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageEnvironment.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageEnvironment.java
@@ -40,11 +40,11 @@
  * For each GrpcMessageEnvironment, there are a single {@link GrpcMessageServer} and multiple {@link GrpcMessageClient},
 * which are responsible for replying to messages from other clients, and for transferring messages
  * to other servers, respectively.
- *
+ * <p>
  * The {@link GrpcMessageServer} is started as soon as the environment is initialized, and registers the unique sender
 * id of the local node to the name server, which is used for id-based communication. The {@link MessageListener}s should
 * be set up to correctly handle incoming messages.
- *
+ * <p>
  * The {@link GrpcMessageClient}s are created whenever there is a request to create a {@link MessageSender}, a component
  * to issue RPC calls to other servers. Like the {@link GrpcMessageServer} registers its id to the name server, the
  * {@link GrpcMessageClient} uses target receiver id to look up the name server to resolve the ip address of the target
@@ -61,10 +61,10 @@
 
   @Inject
   private GrpcMessageEnvironment(
-      final LocalAddressProvider localAddressProvider,
-      final NameResolver nameResolver,
-      final IdentifierFactory idFactory,
-      @Parameter(MessageParameters.SenderId.class) final String localSenderId) {
+    final LocalAddressProvider localAddressProvider,
+    final NameResolver nameResolver,
+    final IdentifierFactory idFactory,
+    @Parameter(MessageParameters.SenderId.class) final String localSenderId) {
     this.nameResolver = nameResolver;
     this.idFactory = idFactory;
     this.grpcServer = new GrpcMessageServer(localAddressProvider, nameResolver, idFactory, localSenderId);
@@ -99,7 +99,7 @@
     try {
       grpcClient.connect();
       final MessageSender<ControlMessage.Message> messageSender = new GrpcMessageSender(
-          receiverId, listenerId, grpcClient);
+        receiverId, listenerId, grpcClient);
       completableFuture.complete(messageSender);
     } catch (final Exception e) {
       LOG.warn("Failed to connect a receiver id=" + receiverId + ", listenerId=" + listenerId, e);
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageSender.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageSender.java
index 48ffdfb..0ff9b4b 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageSender.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageSender.java
@@ -35,6 +35,7 @@
 
   /**
    * Constructor.
+   *
    * @param receiverId id of the receiver.
    * @param listenerId id of the listener.
    * @param grpcClient grpc message client.
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageServer.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageServer.java
index 4d310fc..51bf564 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageServer.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/grpc/GrpcMessageServer.java
@@ -18,13 +18,13 @@
  */
 package org.apache.nemo.runtime.common.message.grpc;
 
+import io.grpc.Server;
+import io.grpc.ServerBuilder;
+import io.grpc.stub.StreamObserver;
 import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.runtime.common.comm.GrpcMessageService;
 import org.apache.nemo.runtime.common.comm.MessageServiceGrpc;
 import org.apache.nemo.runtime.common.message.MessageListener;
-import io.grpc.Server;
-import io.grpc.ServerBuilder;
-import io.grpc.stub.StreamObserver;
 import org.apache.reef.io.network.naming.NameResolver;
 import org.apache.reef.wake.IdentifierFactory;
 import org.apache.reef.wake.remote.address.LocalAddressProvider;
@@ -39,11 +39,11 @@
 * Represents the RPC server that is responsible for responding to all messages from other clients. The server tries to
 * bind to a random port, and registers the bound IP address to the name server with the localSenderId
  * (which is defined in {@link org.apache.nemo.runtime.common.message.MessageParameters.SenderId}).
- *
+ * <p>
 * The listeners, implementations of {@link MessageListener}, should be set up on this class, and then the incoming
  * messages, which contain corresponding listener id as property, are properly dispatched to the registered
  * listeners.
- *
+ * <p>
  * The currently implemented RPC methods are send and request.
  *
  * @see org.apache.nemo.runtime.common.message.MessageSender#send(Object)
@@ -66,10 +66,11 @@
 
   /**
    * Constructor.
+   *
    * @param localAddressProvider local address provider.
-   * @param nameResolver name resolver.
-   * @param idFactory identifier factory.
-   * @param localSenderId id of the local sender.
+   * @param nameResolver         name resolver.
+   * @param idFactory            identifier factory.
+   * @param localSenderId        id of the local sender.
    */
   GrpcMessageServer(final LocalAddressProvider localAddressProvider,
                     final NameResolver nameResolver,
@@ -84,8 +85,9 @@
 
   /**
    * Set up a listener.
+   *
    * @param listenerId id of the listener.
-   * @param listener the message listener.
+   * @param listener   the message listener.
    */
   void setupListener(final String listenerId, final MessageListener<ControlMessage.Message> listener) {
     if (listenerMap.putIfAbsent(listenerId, listener) != null) {
@@ -95,6 +97,7 @@
 
   /**
    * Remove a listener by its id.
+   *
    * @param listenerId id of the listener to remove.
    */
   void removeListener(final String listenerId) {
@@ -110,8 +113,8 @@
   void start() throws Exception {
     // 1. Bind to random port
     this.server = ServerBuilder.forPort(0)
-        .addService(new MessageService())
-        .build();
+      .addService(new MessageService())
+      .build();
 
     // 2. Start the server
     server.start();
@@ -123,6 +126,7 @@
 
   /**
    * For registering to the name server.
+   *
    * @param port port of the socket address.
    * @throws Exception
    */
@@ -139,11 +143,12 @@
     }
 
     throw new Exception("Failed to register id=" + localSenderId + " after "
-        + NAME_SERVER_REGISTER_RETRY_COUNT + " retries");
+      + NAME_SERVER_REGISTER_RETRY_COUNT + " retries");
   }
 
   /**
    * Closes the server.
+   *
    * @throws Exception exception while closing.
    */
   void close() throws Exception {
@@ -161,7 +166,7 @@
     * Receive a message from a client, notify the corresponding listener if one exists, and finish the rpc call by calling
      * {@link StreamObserver#onNext(Object)} with the VOID_MESSAGE and calling {@link StreamObserver#onCompleted()}.
      *
-     * @param message a message from a client
+     * @param message          a message from a client
      * @param responseObserver an observer to control this rpc call
      */
     @Override
@@ -170,14 +175,14 @@
       final MessageListener<ControlMessage.Message> listener = listenerMap.get(message.getListenerId());
       if (listener == null) {
         LOG.warn("A msg is ignored since there is no registered listener. msg.id={}, msg.listenerId={}, msg.type={}",
-            message.getId(), message.getListenerId(), message.getType());
+          message.getId(), message.getListenerId(), message.getType());
         responseObserver.onNext(voidMessage);
         responseObserver.onCompleted();
         return;
       }
 
       LOG.debug("[SEND] request msg.id={}, msg.listenerId={}, msg.type={}",
-          message.getId(), message.getListenerId(), message.getType());
+        message.getId(), message.getListenerId(), message.getType());
       listener.onMessage(message);
       responseObserver.onNext(voidMessage);
       responseObserver.onCompleted();
@@ -188,22 +193,22 @@
      * raises an exception with {@link StreamObserver#onError(Throwable)}. This rpc call will be finished in
     * {@link GrpcMessageContext} since the context only knows when the {@link MessageListener} would reply to a message.
      *
-     * @param message a message from a client
+     * @param message          a message from a client
      * @param responseObserver an observer to control this rpc call
      */
     @Override
     public void request(final ControlMessage.Message message,
                         final StreamObserver<ControlMessage.Message> responseObserver) {
       LOG.debug("[REQUEST] request msg.id={}, msg.listenerId={}, msg.type={}",
-          message.getId(), message.getListenerId(), message.getType());
+        message.getId(), message.getListenerId(), message.getType());
 
       final MessageListener<ControlMessage.Message> listener = listenerMap.get(message.getListenerId());
       if (listener == null) {
         LOG.warn("A message arrived, which has no registered listener. msg.id={}, msg.listenerId={}, msg.type={}",
-            message.getId(), message.getListenerId(), message.getType());
+          message.getId(), message.getListenerId(), message.getType());
 
         responseObserver.onError(new Exception("There is no registered listener id=" + message.getListenerId()
-            + " for message type=" + message.getType()));
+          + " for message type=" + message.getType()));
         return;
       }
 
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageContext.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageContext.java
index 5a6cc3e..65df46a 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageContext.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageContext.java
@@ -31,8 +31,9 @@
   private Object replyMessage;
 
   /**
-   *  TODO #10: Handle Method Javadocs Requirements for Checkstyle Warnings.
-   * @param senderId  TODO #10: Handle Method Javadocs Requirements for Checkstyle Warnings.
+   * TODO #10: Handle Method Javadocs Requirements for Checkstyle Warnings.
+   *
+   * @param senderId TODO #10: Handle Method Javadocs Requirements for Checkstyle Warnings.
    */
   LocalMessageContext(final String senderId) {
     this.senderId = senderId;
@@ -48,7 +49,8 @@
   }
 
   /**
-   *  TODO #10: Handle Method Javadocs Requirements for Checkstyle Warnings.
+   * TODO #10: Handle Method Javadocs Requirements for Checkstyle Warnings.
+   *
    * @return TODO #10: Handle Method Javadocs Requirements for Checkstyle Warnings.
    */
   public Optional<Object> getReplyMessage() {
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageDispatcher.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageDispatcher.java
index 15c7731..046b7a1 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageDispatcher.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageDispatcher.java
@@ -44,14 +44,14 @@
   }
 
   <T> MessageSender<T> setupListener(
-      final String currentNodeId, final String messageTypeId, final MessageListener<T> listener) {
+    final String currentNodeId, final String messageTypeId, final MessageListener<T> listener) {
 
     ConcurrentMap<String, MessageListener> messageTypeToListenerMap = nodeIdToMessageListenersMap.get(currentNodeId);
 
     if (messageTypeToListenerMap == null) {
       messageTypeToListenerMap = new ConcurrentHashMap<>();
       final ConcurrentMap<String, MessageListener> map = nodeIdToMessageListenersMap.putIfAbsent(
-          currentNodeId, messageTypeToListenerMap);
+        currentNodeId, messageTypeToListenerMap);
       if (map != null) {
         messageTypeToListenerMap = map;
       }
@@ -59,7 +59,7 @@
 
     if (messageTypeToListenerMap.putIfAbsent(messageTypeId, listener) != null) {
       throw new LocalDispatcherException(
-          messageTypeId + " was already used in " + currentNodeId);
+        messageTypeId + " was already used in " + currentNodeId);
     }
 
     return new LocalMessageSender<>(currentNodeId, currentNodeId, messageTypeId, this);
@@ -71,7 +71,7 @@
   }
 
   <T> void dispatchSendMessage(
-      final String targetId, final String messageTypeId, final T message) {
+    final String targetId, final String messageTypeId, final T message) {
     final MessageListener listener = nodeIdToMessageListenersMap.get(targetId).get(messageTypeId);
     if (listener == null) {
       throw new LocalDispatcherException("There was no set up listener for " + messageTypeId + " in " + targetId);
@@ -80,7 +80,7 @@
   }
 
   <T, U> CompletableFuture<U> dispatchRequestMessage(
-      final String senderId, final String targetId, final String messageTypeId, final T message) {
+    final String senderId, final String targetId, final String messageTypeId, final T message) {
 
     final MessageListener listener = nodeIdToMessageListenersMap.get(targetId).get(messageTypeId);
     if (listener == null) {
@@ -119,7 +119,7 @@
    */
   public static Injector forkInjector(final Injector baseInjector) throws InjectionException {
     final Injector injector = baseInjector
-        .forkInjector(LocalMessageEnvironment.LOCAL_MESSAGE_ENVIRONMENT_CONFIGURATION);
+      .forkInjector(LocalMessageEnvironment.LOCAL_MESSAGE_ENVIRONMENT_CONFIGURATION);
     injector.getInstance(LocalMessageDispatcher.class);
     return injector;
   }
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageEnvironment.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageEnvironment.java
index 0b35a45..e8321f3 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageEnvironment.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageEnvironment.java
@@ -39,7 +39,7 @@
 public final class LocalMessageEnvironment implements MessageEnvironment {
   private static final Tang TANG = Tang.Factory.getTang();
   public static final Configuration LOCAL_MESSAGE_ENVIRONMENT_CONFIGURATION = TANG.newConfigurationBuilder()
-      .bindImplementation(MessageEnvironment.class, LocalMessageEnvironment.class).build();
+    .bindImplementation(MessageEnvironment.class, LocalMessageEnvironment.class).build();
 
   private final String currentNodeId;
   private final LocalMessageDispatcher dispatcher;
@@ -53,7 +53,7 @@
 
   @Override
   public <T> void setupListener(
-      final String listenerId, final MessageListener<T> listener) {
+    final String listenerId, final MessageListener<T> listener) {
     dispatcher.setupListener(currentNodeId, listenerId, listener);
   }
 
@@ -64,9 +64,9 @@
 
   @Override
   public <T> Future<MessageSender<T>> asyncConnect(
-      final String targetId, final String messageTypeId) {
+    final String targetId, final String messageTypeId) {
     return CompletableFuture.completedFuture(new LocalMessageSender<T>(
-        currentNodeId, targetId, messageTypeId, dispatcher));
+      currentNodeId, targetId, messageTypeId, dispatcher));
   }
 
   @Override
@@ -84,13 +84,13 @@
    *
    * @param baseInjector provided by {@link LocalMessageDispatcher#getInjector()}
    *                     or {@link LocalMessageDispatcher#forkInjector(Injector)}
-   * @param senderId  the identifier for the sender
+   * @param senderId     the identifier for the sender
    * @return an {@link Injector} which has {@link LocalMessageDispatcher} instance for {@link MessageEnvironment}
    * @throws InjectionException when fails to inject {@link MessageEnvironment}
    */
   public static Injector forkInjector(final Injector baseInjector, final String senderId) throws InjectionException {
     final Injector injector = baseInjector.forkInjector(TANG.newConfigurationBuilder()
-        .bindNamedParameter(MessageParameters.SenderId.class, senderId).build());
+      .bindNamedParameter(MessageParameters.SenderId.class, senderId).build());
     injector.getInstance(MessageEnvironment.class);
     return injector;
   }
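
Putting the two fork helpers together, a single-process test can give each simulated node its own `MessageEnvironment` while sharing one dispatcher. A hypothetical wiring sketch (assumes REEF Tang on the classpath; `getInjector()` is referenced in the Javadoc above and assumed here to be a static factory for the base injector):

```java
// Per-node environments sharing one LocalMessageDispatcher in a single process.
import org.apache.nemo.runtime.common.message.MessageEnvironment;
import org.apache.nemo.runtime.common.message.local.LocalMessageDispatcher;
import org.apache.nemo.runtime.common.message.local.LocalMessageEnvironment;
import org.apache.reef.tang.Injector;

final class LocalWiringSketch {
  static MessageEnvironment environmentFor(final String nodeId) throws Exception {
    // One dispatcher shared by every simulated node in this process...
    final Injector base = LocalMessageDispatcher.getInjector();
    // ...and a per-node fork that binds the sender id and builds the environment.
    return LocalMessageEnvironment.forkInjector(base, nodeId)
      .getInstance(MessageEnvironment.class);
  }
}
```
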
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageSender.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageSender.java
index 48e9ce0..4fff3ae 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageSender.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/local/LocalMessageSender.java
@@ -24,6 +24,7 @@
 
 /**
  * A simple {@link MessageSender} implementation that works on a single node.
+ *
  * @param <T> a message type
  */
 public final class LocalMessageSender<T> implements MessageSender<T> {
@@ -35,9 +36,9 @@
   private boolean isClosed;
 
   public LocalMessageSender(final String senderId,
-                     final String targetId,
-                     final String messageTypeId,
-                     final LocalMessageDispatcher dispatcher) {
+                            final String targetId,
+                            final String messageTypeId,
+                            final LocalMessageDispatcher dispatcher) {
     this.senderId = senderId;
     this.targetId = targetId;
     this.messageTypeId = messageTypeId;
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/ControlMessageCodec.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/ControlMessageCodec.java
index 32ef65b..3742698 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/ControlMessageCodec.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/ControlMessageCodec.java
@@ -26,7 +26,7 @@
  * Codec for ControlMessage.
  */
 final class ControlMessageCodec implements Codec<ControlMessage.Message>,
-    org.apache.reef.wake.remote.Codec<ControlMessage.Message> {
+  org.apache.reef.wake.remote.Codec<ControlMessage.Message> {
 
   ControlMessageCodec() {
   }
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/NcsMessageContext.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/NcsMessageContext.java
index 47a57b8..54108ca 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/NcsMessageContext.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/NcsMessageContext.java
@@ -23,7 +23,6 @@
 import org.apache.reef.io.network.Connection;
 import org.apache.reef.io.network.ConnectionFactory;
 import org.apache.reef.wake.IdentifierFactory;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/NcsMessageEnvironment.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/NcsMessageEnvironment.java
index 5535fff..5a45e0c 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/NcsMessageEnvironment.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/NcsMessageEnvironment.java
@@ -60,9 +60,9 @@
 
   @Inject
   private NcsMessageEnvironment(
-      final NetworkConnectionService networkConnectionService,
-      final IdentifierFactory idFactory,
-      @Parameter(MessageParameters.SenderId.class) final String senderId) {
+    final NetworkConnectionService networkConnectionService,
+    final IdentifierFactory idFactory,
+    @Parameter(MessageParameters.SenderId.class) final String senderId) {
     this.networkConnectionService = networkConnectionService;
     this.idFactory = idFactory;
     this.senderId = senderId;
@@ -70,11 +70,11 @@
     this.listenerConcurrentMap = new ConcurrentHashMap<>();
     this.receiverToConnectionMap = new ConcurrentHashMap<>();
     this.connectionFactory = networkConnectionService.registerConnectionFactory(
-        idFactory.getNewInstance(NCS_CONN_FACTORY_ID),
-        new ControlMessageCodec(),
-        new NcsMessageHandler(),
-        new NcsLinkListener(),
-        idFactory.getNewInstance(senderId));
+      idFactory.getNewInstance(NCS_CONN_FACTORY_ID),
+      new ControlMessageCodec(),
+      new NcsMessageHandler(),
+      new NcsLinkListener(),
+      idFactory.getNewInstance(senderId));
   }
 
   @Override
@@ -185,7 +185,7 @@
    * Send: Messages sent without expecting a reply.
    * Request: Messages sent to get a reply.
    * Reply: Messages that reply to a request.
-   *
+   * <p>
    * Not sure these variable names are conventionally used in RPC frameworks...
    * Let's revisit them when we work on
    */
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/NcsMessageSender.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/NcsMessageSender.java
index bb3c558..a302cfd 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/NcsMessageSender.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/message/ncs/NcsMessageSender.java
@@ -22,11 +22,11 @@
 import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.runtime.common.message.MessageSender;
 import org.apache.reef.io.network.Connection;
-
-import java.util.concurrent.CompletableFuture;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.concurrent.CompletableFuture;
+
 /**
  * MessageSender for NCS.
  */
@@ -37,8 +37,8 @@
   private final ReplyFutureMap<ControlMessage.Message> replyFutureMap;
 
   NcsMessageSender(
-      final Connection<ControlMessage.Message> connection,
-      final ReplyFutureMap replyFutureMap) {
+    final Connection<ControlMessage.Message> connection,
+    final ReplyFutureMap replyFutureMap) {
     this.connection = connection;
     this.replyFutureMap = replyFutureMap;
   }
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/DataTransferEvent.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/DataTransferEvent.java
index 657c094..15664a2 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/DataTransferEvent.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/DataTransferEvent.java
@@ -31,6 +31,7 @@
 
   /**
    * Get transfer type.
+   *
    * @return TransferType.
    */
   public final TransferType getTransferType() {
@@ -39,6 +40,7 @@
 
   /**
    * Set transfer type.
+   *
    * @param transferType TransferType to set.
    */
   public final void setTransferType(final TransferType transferType) {
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/Event.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/Event.java
index 9cf851d..9801aab 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/Event.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/Event.java
@@ -28,6 +28,7 @@
 
   /**
    * Constructor.
+   *
    * @param timestamp timestamp in millisecond.
    */
   public Event(final long timestamp) {
@@ -36,14 +37,16 @@
 
   /**
    * Get timestamp.
+   *
    * @return timestamp.
    */
   public final long getTimestamp() {
     return timestamp;
-  };
+  }
 
   /**
    * Set timestamp.
+   *
    * @param timestamp timestamp in millisecond.
    */
   public final void setTimestamp(final long timestamp) {
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/JobMetric.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/JobMetric.java
index 3b63cff..85fcc8e 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/JobMetric.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/JobMetric.java
@@ -50,6 +50,7 @@
 
   /**
    * Constructor.
+   *
    * @param physicalPlan physical plan to derive the id from.
    */
   public JobMetric(final PhysicalPlan physicalPlan) {
@@ -58,6 +59,7 @@
 
   /**
    * Constructor with the designated id.
+   *
    * @param id the id.
    */
   public JobMetric(final String id) {
@@ -88,6 +90,7 @@
 
   /**
    * Setter for the IR DAG.
+   *
    * @param irDag the IR DAG.
    */
   public void setIRDAG(final IRDAG irDag) {
@@ -120,6 +123,7 @@
 
   /**
    * Setter for the stage DAG.
+   *
    * @param dag the stage DAG.
    */
   public void setStageDAG(final DAG<Stage, StageEdge> dag) {
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/Metric.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/Metric.java
index fd57492..5880838 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/Metric.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/Metric.java
@@ -24,12 +24,14 @@
 public interface Metric {
   /**
    * Get its unique id.
+   *
    * @return a unique id
    */
   String getId();
 
   /**
    * Process metric message from evaluators.
+   *
    * @param metricField field name of the metric.
    * @param metricValue byte array of serialized data value.
    * @return true if the metric was changed or false if not.
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/MetricUtils.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/MetricUtils.java
index 23fdef7..4f77297 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/MetricUtils.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/MetricUtils.java
@@ -75,6 +75,7 @@
 
   /**
    * Load the BiMaps (lightweight) Metadata from the DB.
+   *
    * @return the loaded BiMaps, or initialized ones.
    */
   private static Pair<HashBiMap<Integer, Class<? extends ExecutionProperty>>,
@@ -86,7 +87,7 @@
 
         statement.executeUpdate(
           "CREATE TABLE IF NOT EXISTS " + METADATA_TABLE_NAME
-          + " (key TEXT NOT NULL UNIQUE, data BYTEA NOT NULL);");
+            + " (key TEXT NOT NULL UNIQUE, data BYTEA NOT NULL);");
 
         final ResultSet rsl = statement.executeQuery(
           "SELECT * FROM " + METADATA_TABLE_NAME + " WHERE key='EP_KEY_METADATA';");
@@ -162,7 +163,7 @@
         if (MUST_UPDATE_EP_METADATA.getCount() == 0) {
           try (final PreparedStatement pstmt =
                  c.prepareStatement("INSERT INTO " + METADATA_TABLE_NAME + "(key, data) "
-                     + "VALUES ('EP_METADATA', ?) ON CONFLICT (key) DO UPDATE SET data = excluded.data;")) {
+                   + "VALUES ('EP_METADATA', ?) ON CONFLICT (key) DO UPDATE SET data = excluded.data;")) {
             pstmt.setBinaryStream(1,
               new ByteArrayInputStream(SerializationUtils.serialize(EP_METADATA)));
             pstmt.executeUpdate();
@@ -177,6 +178,7 @@
 
   /**
    * Stringify execution properties of an IR DAG.
+   *
    * @param irdag IR DAG to observe.
    * @return the pair of stringified execution properties. Left is for vertices, right is for edges.
    */
@@ -200,10 +202,11 @@
 
   /**
    * Formatter for execution properties. It updates the metadata for the metrics if new EP key / values are discovered.
-   * @param builder string builder to append the metrics to.
-   * @param idx index specifying whether it's a vertex or an edge. This should be one digit.
+   *
+   * @param builder   string builder to append the metrics to.
+   * @param idx       index specifying whether it's a vertex or an edge. This should be one digit.
    * @param numericId numeric ID of the vertex or the edge.
-   * @param ep the execution property.
+   * @param ep        the execution property.
    */
   private static void epFormatter(final StringBuilder builder, final int idx,
                                   final Integer numericId, final ExecutionProperty<?> ep) {
@@ -227,8 +230,9 @@
   /**
    * Helper method to convert Execution Property value objects to an integer index.
    * It updates the metadata for the metrics if new EP values are discovered.
+   *
    * @param epKeyIndex the index of the execution property key.
-   * @param ep the execution property containing the value.
+   * @param ep         the execution property containing the value.
    * @return the converted value index.
    */
   private static Integer valueToIndex(final Integer epKeyIndex, final ExecutionProperty<?> ep) {
@@ -269,6 +273,7 @@
 
   /**
    * Finds the project root path.
+   *
    * @return the project root path.
    */
   private static String fetchProjectRootPath() {
@@ -277,6 +282,7 @@
 
   /**
    * Helper method to recursively find the LICENSE file.
+   *
    * @param path the path to search for.
    * @return the path containing the LICENSE file.
    */
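
The metadata writes in this file lean on PostgreSQL's upsert syntax, `INSERT ... ON CONFLICT (key) DO UPDATE SET data = excluded.data`, so the serialized `EP_METADATA` row is created on first use and overwritten on subsequent saves; this is also why the `postgresql` driver appears among the runtime/common dependencies earlier in this diff.
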
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/StateMetric.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/StateMetric.java
index af61741..9ad4d6a 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/StateMetric.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/StateMetric.java
@@ -23,19 +23,22 @@
 
 /**
  * Interface for a metric which contains its state.
+ *
  * @param <T> class of state of the metric.
  */
 public interface StateMetric<T extends Serializable> extends Metric {
   /**
    * Get its list of {@link StateTransitionEvent}.
+   *
    * @return list of events.
    */
   List<StateTransitionEvent<T>> getStateTransitionEvents();
 
   /**
    * Add a {@link StateTransitionEvent} to the metric.
+   *
    * @param prevState previous state.
-   * @param newState new state.
+   * @param newState  new state.
    */
   void addEvent(final T prevState, final T newState);
 }
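For context, `StateMetric` asks implementors to keep an append-only log of state transitions. Below is a minimal stand-alone sketch of that contract; `SimpleStateMetric` and its string-typed log are hypothetical, and the `Metric` supertype's methods are omitted:

```java
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/** Hypothetical stand-in mirroring the StateMetric contract. */
final class SimpleStateMetric<T extends Serializable> {
  private final List<String> stateTransitionEvents = new ArrayList<>();

  /** Records one transition, timestamped at the moment it is observed. */
  void addEvent(final T prevState, final T newState) {
    stateTransitionEvents.add(System.currentTimeMillis() + ": " + prevState + " -> " + newState);
  }

  List<String> getStateTransitionEvents() {
    return Collections.unmodifiableList(stateTransitionEvents);
  }
}
```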
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/StateTransitionEvent.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/StateTransitionEvent.java
index 5149925..536d83a 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/StateTransitionEvent.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/StateTransitionEvent.java
@@ -22,6 +22,7 @@
 
 /**
  * Event of a state transition. It contains a timestamp and the state transition.
+ *
  * @param <T> class of state for the metric.
  */
 public final class StateTransitionEvent<T extends Serializable> extends Event {
@@ -36,6 +37,7 @@
 
   /**
    * Get previous state.
+   *
    * @return previous state.
    */
   public T getPrevState() {
@@ -44,6 +46,7 @@
 
   /**
    * Get new state.
+   *
    * @return new state.
    */
   public T getNewState() {
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/TaskMetric.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/TaskMetric.java
index 67d1520..57833bb 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/TaskMetric.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/TaskMetric.java
@@ -18,15 +18,14 @@
  */
 package org.apache.nemo.runtime.common.metric;
 
-import org.apache.nemo.runtime.common.state.TaskState;
 import org.apache.commons.lang3.SerializationUtils;
+import org.apache.nemo.runtime.common.state.TaskState;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.List;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * Metric class for {@link org.apache.nemo.runtime.common.plan.Task}.
  */
@@ -143,7 +142,7 @@
         break;
       case "stateTransitionEvent":
         final StateTransitionEvent<TaskState.State> newStateTransitionEvent =
-            SerializationUtils.deserialize(metricValue);
+          SerializationUtils.deserialize(metricValue);
         addEvent(newStateTransitionEvent);
         break;
       case "scheduleAttempt":
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/PhysicalPlanGenerator.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/PhysicalPlanGenerator.java
index fa5e877..b74dbd4 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/PhysicalPlanGenerator.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/PhysicalPlanGenerator.java
@@ -18,22 +18,24 @@
  */
 package org.apache.nemo.runtime.common.plan;
 
+import org.apache.nemo.common.dag.DAG;
+import org.apache.nemo.common.dag.DAGBuilder;
+import org.apache.nemo.common.exception.IllegalVertexOperationException;
+import org.apache.nemo.common.exception.PhysicalPlanGenerationException;
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.common.ir.Readable;
+import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.edge.executionproperty.DuplicateEdgeGroupProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.DuplicateEdgeGroupPropertyValue;
 import org.apache.nemo.common.ir.executionproperty.ExecutionPropertyMap;
 import org.apache.nemo.common.ir.executionproperty.VertexExecutionProperty;
-import org.apache.nemo.common.ir.vertex.*;
+import org.apache.nemo.common.ir.vertex.IRVertex;
+import org.apache.nemo.common.ir.vertex.OperatorVertex;
+import org.apache.nemo.common.ir.vertex.SourceVertex;
 import org.apache.nemo.common.ir.vertex.executionproperty.ParallelismProperty;
 import org.apache.nemo.common.ir.vertex.executionproperty.ScheduleGroupProperty;
 import org.apache.nemo.common.ir.vertex.utility.SamplingVertex;
 import org.apache.nemo.conf.JobConf;
-import org.apache.nemo.common.dag.DAG;
-import org.apache.nemo.common.dag.DAGBuilder;
-import org.apache.nemo.common.ir.edge.IREdge;
-import org.apache.nemo.common.exception.IllegalVertexOperationException;
-import org.apache.nemo.common.exception.PhysicalPlanGenerationException;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.reef.tang.annotations.Parameter;
 import org.slf4j.Logger;
@@ -278,6 +280,7 @@
 
   /**
    * Integrity check for Stage.
+   *
    * @param stage to check for
    */
   private void integrityCheck(final Stage stage) {
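This file also shows the other import convention the reformat applies: wildcard imports are expanded to the classes the file actually uses.

```java
// Before
import org.apache.nemo.common.ir.vertex.*;

// After: only the three classes this file references
import org.apache.nemo.common.ir.vertex.IRVertex;
import org.apache.nemo.common.ir.vertex.OperatorVertex;
import org.apache.nemo.common.ir.vertex.SourceVertex;
```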
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/PlanRewriter.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/PlanRewriter.java
index 8140d2d..7b7ab6d 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/PlanRewriter.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/PlanRewriter.java
@@ -24,14 +24,14 @@
 public interface PlanRewriter {
   /**
    * @param currentPhysicalPlan to rewrite.
-   * @param messageId of the rewrite.
+   * @param messageId           of the rewrite.
    * @return physical plan.
    */
   PhysicalPlan rewrite(final PhysicalPlan currentPhysicalPlan, final int messageId);
 
   /**
    * @param messageId of the rewrite.
-   * @param data to accumulate.
+   * @param data      to accumulate.
    */
   void accumulate(final int messageId, final Object data);
 }
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/RuntimeEdge.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/RuntimeEdge.java
index bf98a86..fc7a331 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/RuntimeEdge.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/RuntimeEdge.java
@@ -30,6 +30,7 @@
 
 /**
  * Represents the edge between vertices in a logical/physical plan in runtime.
+ *
  * @param <V> the vertex type.
  */
 public class RuntimeEdge<V extends Vertex> extends Edge<V> {
@@ -38,10 +39,10 @@
   /**
    * Constructs the edge given the below parameters.
    *
-   * @param runtimeEdgeId  the id of this edge.
+   * @param runtimeEdgeId       the id of this edge.
    * @param executionProperties to control the data flow on this edge.
-   * @param src            the source vertex.
-   * @param dst            the destination vertex.
+   * @param src                 the source vertex.
+   * @param dst                 the destination vertex.
    */
   public RuntimeEdge(final String runtimeEdgeId,
                      final ExecutionPropertyMap<EdgeExecutionProperty> executionProperties,
@@ -59,13 +60,13 @@
    * @return the execution property.
    */
   public final <T extends Serializable> Optional<T> getPropertyValue(
-      final Class<? extends EdgeExecutionProperty<T>> executionPropertyKey) {
+    final Class<? extends EdgeExecutionProperty<T>> executionPropertyKey) {
     return executionProperties.get(executionPropertyKey);
   }
 
   /**
    * @param executionPropertyKey key
-   * @param <T> type
+   * @param <T>                  type
    * @return the value
    */
   public final <T extends Serializable> T getPropertyValueOrRuntimeException(
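The `getPropertyValue` hunk illustrates the continuation-indent rule applied throughout: a wrapped parameter list continues at two extra spaces instead of four. Quoting the method as it stands after the change:

```java
public final <T extends Serializable> Optional<T> getPropertyValue(
  final Class<? extends EdgeExecutionProperty<T>> executionPropertyKey) {
  return executionProperties.get(executionPropertyKey);
}
```

One side effect, visible above, is that the wrapped parameter now shares a column with the method body.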
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/Stage.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/Stage.java
index 070ca62..45b5029 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/Stage.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/Stage.java
@@ -20,6 +20,7 @@
 
 import com.fasterxml.jackson.databind.node.JsonNodeFactory;
 import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.commons.lang3.SerializationUtils;
 import org.apache.nemo.common.dag.DAG;
 import org.apache.nemo.common.dag.Vertex;
 import org.apache.nemo.common.ir.Readable;
@@ -28,7 +29,6 @@
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.vertex.executionproperty.ParallelismProperty;
 import org.apache.nemo.common.ir.vertex.executionproperty.ScheduleGroupProperty;
-import org.apache.commons.lang3.SerializationUtils;
 
 import java.io.Serializable;
 import java.util.List;
@@ -104,7 +104,7 @@
    */
   public int getScheduleGroup() {
     return executionProperties.get(ScheduleGroupProperty.class)
-        .orElseThrow(() -> new RuntimeException("ScheduleGroup property must be set for Stage"));
+      .orElseThrow(() -> new RuntimeException("ScheduleGroup property must be set for Stage"));
   }
 
   /**
@@ -129,7 +129,7 @@
    * @return the execution property.
    */
   public <T extends Serializable> Optional<T> getPropertyValue(
-      final Class<? extends VertexExecutionProperty<T>> executionPropertyKey) {
+    final Class<? extends VertexExecutionProperty<T>> executionPropertyKey) {
     return executionProperties.get(executionPropertyKey);
   }
 
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/StageEdge.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/StageEdge.java
index 04c0f74..3430180 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/StageEdge.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/StageEdge.java
@@ -21,15 +21,15 @@
 import com.fasterxml.jackson.databind.node.JsonNodeFactory;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.nemo.common.HashRange;
 import org.apache.nemo.common.KeyRange;
 import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.DataFlowProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.PartitionSetProperty;
 import org.apache.nemo.common.ir.executionproperty.EdgeExecutionProperty;
-import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.executionproperty.ExecutionPropertyMap;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -92,7 +92,7 @@
   }
 
   @Override
-    public ObjectNode getPropertiesAsJsonNode() {
+  public ObjectNode getPropertiesAsJsonNode() {
     final ObjectNode node = JsonNodeFactory.instance.objectNode();
     node.put("runtimeEdgeId", getId());
     node.set("executionProperties", getExecutionProperties().asJsonNode());
@@ -159,7 +159,7 @@
    */
   public List<KeyRange> getKeyRanges() {
     final ArrayList<KeyRange> defaultPartitionSet = new ArrayList<>();
-    for (int taskIndex = 0; taskIndex <  getDst().getParallelism(); taskIndex++) {
+    for (int taskIndex = 0; taskIndex < getDst().getParallelism(); taskIndex++) {
       defaultPartitionSet.add(taskIndex, HashRange.of(taskIndex, taskIndex + 1));
     }
     final List<KeyRange> keyRanges = getExecutionProperties()
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/StagePartitioner.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/StagePartitioner.java
index 0a3f088..3563fac 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/StagePartitioner.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/StagePartitioner.java
@@ -18,16 +18,18 @@
  */
 package org.apache.nemo.runtime.common.plan;
 
+import net.jcip.annotations.ThreadSafe;
+import org.apache.commons.lang3.mutable.MutableInt;
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
 import org.apache.nemo.common.ir.executionproperty.VertexExecutionProperty;
 import org.apache.nemo.common.ir.vertex.IRVertex;
-import net.jcip.annotations.ThreadSafe;
-import org.apache.commons.lang3.mutable.MutableInt;
 import org.apache.reef.annotations.audience.DriverSide;
 
-import java.util.*;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.function.Function;
 import java.util.stream.Collectors;
@@ -36,9 +38,9 @@
  * A function that is responsible for stage partitioning on an IR DAG.
  * Each stage becomes a maximal set of {@link IRVertex} such that
  * <ul>
- *   <li>branches and non-OneToOne edges are not allowed within a stage, and</li>
- *   <li>all vertices in a stage should have same {@link VertexExecutionProperty} map,
- *   except for the ignored properties.</li>
+ * <li>branches and non-OneToOne edges are not allowed within a stage, and</li>
+ * <li>all vertices in a stage should have the same {@link VertexExecutionProperty} map,
+ * except for the ignored properties.</li>
  * </ul>
  */
 @DriverSide
@@ -52,6 +54,7 @@
    * same set of {@link VertexExecutionProperty}.
    * Invoking this method will make the stage partitioner ignore a specific property when comparing
    * the execution property maps.
+   *
    * @param ignoredPropertyKey a property that will be ignored during the stage partitioning.
    */
   public void addIgnoredPropertyKey(final Class<? extends VertexExecutionProperty> ignoredPropertyKey) {
@@ -94,7 +97,7 @@
 
   /**
    * @param edge an {@link IREdge}.
-   * @param dag IR DAG which contains {@code edge}
+   * @param dag  IR DAG which contains {@code edge}
    * @return {@code true} if and only if the source and the destination vertex of the edge can be merged into one stage.
    */
   private boolean testMergeability(final IREdge edge, final IRDAG dag) {
@@ -104,7 +107,7 @@
     }
     // If the edge is not OneToOne, return false
     if (edge.getPropertyValue(CommunicationPatternProperty.class).get()
-        != CommunicationPatternProperty.Value.OneToOne) {
+      != CommunicationPatternProperty.Value.OneToOne) {
       return false;
     }
     // Return true if and only if the execution properties of the two vertices are compatible
@@ -117,7 +120,7 @@
    */
   public Set<VertexExecutionProperty> getStageProperties(final IRVertex vertex) {
     return vertex.getExecutionProperties().stream()
-        .filter(p -> !ignoredPropertyKeys.contains(p.getClass()))
-        .collect(Collectors.toSet());
+      .filter(p -> !ignoredPropertyKeys.contains(p.getClass()))
+      .collect(Collectors.toSet());
   }
 }
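The class Javadoc above spells out the merge rule that `testMergeability` enforces: two vertices may share a stage only when the edge between them is OneToOne and both carry identical (non-ignored) stage properties. A self-contained sketch of that predicate, with hypothetical stand-ins for `IREdge` and the property sets:

```java
import java.util.Objects;
import java.util.Set;

/** Hypothetical sketch of the stage-merge predicate. */
final class MergeRule {
  enum Pattern { ONE_TO_ONE, SHUFFLE, BROADCAST }

  static boolean mergeable(final Pattern edgePattern,
                           final Set<String> srcStageProperties,
                           final Set<String> dstStageProperties) {
    // Condition 1: only OneToOne edges may stay inside a stage.
    // Condition 2: both endpoints must agree on every non-ignored property.
    return edgePattern == Pattern.ONE_TO_ONE
      && Objects.equals(srcStageProperties, dstStageProperties);
  }
}
```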
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/Task.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/Task.java
index 6def9ea..737b8a5 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/Task.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/Task.java
@@ -24,7 +24,9 @@
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 
 import java.io.Serializable;
-import java.util.*;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
 
 /**
  * A Task (attempt) is a self-contained executable that can be executed on a machine.
@@ -122,7 +124,7 @@
    * @return the execution property.
    */
   public <T extends Serializable> Optional<T> getPropertyValue(
-      final Class<? extends VertexExecutionProperty<T>> executionPropertyKey) {
+    final Class<? extends VertexExecutionProperty<T>> executionPropertyKey) {
     return executionProperties.get(executionPropertyKey);
   }
 
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/BlockState.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/BlockState.java
index bc3eb50..a5d0a41 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/BlockState.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/BlockState.java
@@ -41,7 +41,7 @@
     // From IN_PROGRESS
     stateMachineBuilder.addTransition(State.IN_PROGRESS, State.AVAILABLE, "The block is successfully created");
     stateMachineBuilder.addTransition(State.IN_PROGRESS, State.NOT_AVAILABLE,
-        "The block is lost before being created");
+      "The block is lost before being created");
 
     // From AVAILABLE
     stateMachineBuilder.addTransition(State.AVAILABLE, State.NOT_AVAILABLE, "The block is not available");
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/PlanState.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/PlanState.java
index 72b2fbe..85908c4 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/PlanState.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/PlanState.java
@@ -41,11 +41,11 @@
 
     // Add transitions
     stateMachineBuilder.addTransition(State.READY, State.EXECUTING,
-        "Begin executing!");
+      "Begin executing!");
     stateMachineBuilder.addTransition(State.EXECUTING, State.COMPLETE,
-        "All stages complete, plan complete");
+      "All stages complete, plan complete");
     stateMachineBuilder.addTransition(State.EXECUTING, State.FAILED,
-        "Unrecoverable failure in a stage");
+      "Unrecoverable failure in a stage");
 
     stateMachineBuilder.setInitialState(State.READY);
 
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/StageState.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/StageState.java
index 43eb50e..ce9bf94 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/StageState.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/StageState.java
@@ -22,7 +22,7 @@
 
 /**
  * Represents the states and their transitions of a stage.
- *
+ * <p>
  * Maintained as two simple states (INCOMPLETE, COMPLETE) to avoid ambiguity when the tasks are in different states.
  * For example, it is not clear whether a stage should be EXECUTING or SHOULD_RESTART, if one of the tasks in the stage
  * is EXECUTING, and another is SHOULD_RESTART.
@@ -43,10 +43,10 @@
 
     // Add transitions
     stateMachineBuilder.addTransition(
-        State.INCOMPLETE, State.INCOMPLETE, "A task in the stage needs to be retried");
+      State.INCOMPLETE, State.INCOMPLETE, "A task in the stage needs to be retried");
     stateMachineBuilder.addTransition(State.INCOMPLETE, State.COMPLETE, "All tasks complete");
     stateMachineBuilder.addTransition(State.COMPLETE, State.INCOMPLETE,
-        "Completed before, but a task in this stage should be retried");
+      "Completed before, but a task in this stage should be retried");
     stateMachineBuilder.addTransition(State.COMPLETE, State.COMPLETE,
       "Completed before, but probably a cloned task has completed again");
 
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/TaskState.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/TaskState.java
index ccdc5e7..3c073ac 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/TaskState.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/state/TaskState.java
@@ -60,7 +60,7 @@
 
     // From SHOULD_RETRY
     stateMachineBuilder.addTransition(State.SHOULD_RETRY, State.SHOULD_RETRY,
-        "SHOULD_RETRY can be caused by multiple reasons");
+      "SHOULD_RETRY can be caused by multiple reasons");
 
     stateMachineBuilder.setInitialState(State.READY);
     return stateMachineBuilder.build();
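`BlockState`, `PlanState`, `StageState`, and `TaskState` above all configure their transitions through the same three builder calls: `addTransition(from, to, description)`, `setInitialState(state)`, and `build()`. Below is a minimal sketch of such a builder under those assumed signatures; `StateMachineSketch` is hypothetical, not Nemo's `StateMachine`:

```java
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/** Hypothetical sketch of the builder contract the state classes rely on. */
final class StateMachineSketch<S> {
  private final Map<S, Set<S>> transitions = new HashMap<>();
  private S currentState;

  StateMachineSketch<S> addTransition(final S from, final S to, final String description) {
    // The description is diagnostic only; legality is the (from -> to) pair.
    transitions.computeIfAbsent(from, s -> new HashSet<>()).add(to);
    return this;
  }

  StateMachineSketch<S> setInitialState(final S state) {
    this.currentState = state;
    return this;
  }

  StateMachineSketch<S> build() {
    return this;
  }

  /** Moves to the next state, rejecting transitions that were never declared. */
  void transition(final S to) {
    if (!transitions.getOrDefault(currentState, new HashSet<>()).contains(to)) {
      throw new IllegalStateException(currentState + " -> " + to + " was not declared");
    }
    currentState = to;
  }
}
```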
diff --git a/runtime/common/src/test/java/org/apache/nemo/runtime/common/message/local/LocalMessageTest.java b/runtime/common/src/test/java/org/apache/nemo/runtime/common/message/local/LocalMessageTest.java
index aa011bb..24c9375 100644
--- a/runtime/common/src/test/java/org/apache/nemo/runtime/common/message/local/LocalMessageTest.java
+++ b/runtime/common/src/test/java/org/apache/nemo/runtime/common/message/local/LocalMessageTest.java
@@ -45,18 +45,18 @@
     final String listenerIdBetweenExecutors = "BetweenExecutors";
 
     final Injector injector = TANG.newInjector(TANG.newConfigurationBuilder()
-        .bindImplementation(MessageEnvironment.class, LocalMessageEnvironment.class).build());
+      .bindImplementation(MessageEnvironment.class, LocalMessageEnvironment.class).build());
     injector.getInstance(LocalMessageDispatcher.class);
 
     final MessageEnvironment driverEnv = injector.forkInjector(TANG.newConfigurationBuilder()
-        .bindNamedParameter(MessageParameters.SenderId.class, driverNodeId).build())
-        .getInstance(MessageEnvironment.class);
+      .bindNamedParameter(MessageParameters.SenderId.class, driverNodeId).build())
+      .getInstance(MessageEnvironment.class);
     final MessageEnvironment executorOneEnv = injector.forkInjector(TANG.newConfigurationBuilder()
-        .bindNamedParameter(MessageParameters.SenderId.class, executorOneNodeId).build())
-        .getInstance(MessageEnvironment.class);
+      .bindNamedParameter(MessageParameters.SenderId.class, executorOneNodeId).build())
+      .getInstance(MessageEnvironment.class);
     final MessageEnvironment executorTwoEnv = injector.forkInjector(TANG.newConfigurationBuilder()
-        .bindNamedParameter(MessageParameters.SenderId.class, executorTwoNodeId).build())
-        .getInstance(MessageEnvironment.class);
+      .bindNamedParameter(MessageParameters.SenderId.class, executorTwoNodeId).build())
+      .getInstance(MessageEnvironment.class);
 
     final AtomicInteger toDriverMessageUsingSend = new AtomicInteger();
 
@@ -86,12 +86,12 @@
     // Test sending message from executors to the driver.
 
     final Future<MessageSender<ToDriver>> messageSenderFuture1 = executorOneEnv.asyncConnect(
-        driverNodeId, listenerIdToDriver);
+      driverNodeId, listenerIdToDriver);
     Assert.assertTrue(messageSenderFuture1.isDone());
     final MessageSender<ToDriver> messageSender1 = messageSenderFuture1.get();
 
     final Future<MessageSender<ToDriver>> messageSenderFuture2 = executorTwoEnv.asyncConnect(
-        driverNodeId, listenerIdToDriver);
+      driverNodeId, listenerIdToDriver);
     Assert.assertTrue(messageSenderFuture2.isDone());
     final MessageSender<ToDriver> messageSender2 = messageSenderFuture2.get();
 
@@ -111,9 +111,9 @@
     executorTwoEnv.setupListener(listenerIdBetweenExecutors, new SimpleMessageListener(executorTwoMessageCount));
 
     final MessageSender<BetweenExecutors> oneToTwo = executorOneEnv.<BetweenExecutors>asyncConnect(
-        executorTwoNodeId, listenerIdBetweenExecutors).get();
+      executorTwoNodeId, listenerIdBetweenExecutors).get();
     final MessageSender<BetweenExecutors> twoToOne = executorTwoEnv.<BetweenExecutors>asyncConnect(
-        executorOneNodeId, listenerIdBetweenExecutors).get();
+      executorOneNodeId, listenerIdBetweenExecutors).get();
 
     Assert.assertEquals("oneToTwo", oneToTwo.<String>request(new SimpleMessage("oneToTwo")).get());
     Assert.assertEquals("twoToOne", twoToOne.<String>request(new SimpleMessage("twoToOne")).get());
@@ -132,9 +132,9 @@
     executorTwoEnv.setupListener(listenerIdBetweenExecutors, new SimpleMessageListener(newExecutorTwoMessageCount));
 
     final MessageSender<BetweenExecutors> newOneToTwo = executorOneEnv.<BetweenExecutors>asyncConnect(
-        executorTwoNodeId, listenerIdBetweenExecutors).get();
+      executorTwoNodeId, listenerIdBetweenExecutors).get();
     final MessageSender<BetweenExecutors> newTwoToOne = executorTwoEnv.<BetweenExecutors>asyncConnect(
-        executorOneNodeId, listenerIdBetweenExecutors).get();
+      executorOneNodeId, listenerIdBetweenExecutors).get();
 
     Assert.assertEquals("newOneToTwo", newOneToTwo.<String>request(new SimpleMessage("newOneToTwo")).get());
     Assert.assertEquals("newTwoToOne", newTwoToOne.<String>request(new SimpleMessage("newTwoToOne")).get());
@@ -178,6 +178,7 @@
 
   final class SimpleMessage implements BetweenExecutors {
     private final String data;
+
     SimpleMessage(final String data) {
       this.data = data;
     }
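The test wires everything through REEF Tang: one base injector holds the shared `LocalMessageDispatcher`, and each node gets a fork of it with its own `SenderId` bound. A reduced sketch of that fork pattern; the `SenderId` named parameter here is a hypothetical stand-in for `MessageParameters.SenderId`:

```java
import org.apache.reef.tang.Injector;
import org.apache.reef.tang.Tang;
import org.apache.reef.tang.annotations.Name;
import org.apache.reef.tang.annotations.NamedParameter;

final class ForkInjectorSketch {
  @NamedParameter(doc = "Hypothetical per-fork sender id.")
  static final class SenderId implements Name<String> {
  }

  public static void main(final String[] args) throws Exception {
    final Tang tang = Tang.Factory.getTang();
    // Shared singletons live in the base injector...
    final Injector base = tang.newInjector(tang.newConfigurationBuilder().build());
    // ...while each fork layers its own named parameters on top.
    final Injector forkA = base.forkInjector(
      tang.newConfigurationBuilder().bindNamedParameter(SenderId.class, "executorA").build());
    System.out.println(forkA.getNamedInstance(SenderId.class)); // prints "executorA"
  }
}
```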
diff --git a/runtime/common/src/test/java/org/apache/nemo/runtime/common/plan/PhysicalPlanGeneratorTest.java b/runtime/common/src/test/java/org/apache/nemo/runtime/common/plan/PhysicalPlanGeneratorTest.java
index ad6c628..ebc0268 100644
--- a/runtime/common/src/test/java/org/apache/nemo/runtime/common/plan/PhysicalPlanGeneratorTest.java
+++ b/runtime/common/src/test/java/org/apache/nemo/runtime/common/plan/PhysicalPlanGeneratorTest.java
@@ -49,11 +49,11 @@
     final IRVertex v0 = newIRVertex(0, 5);
     final IRVertex v1 = newIRVertex(0, 3);
     final IRDAG irDAG = new IRDAG(new DAGBuilder<IRVertex, IREdge>()
-        .addVertex(v0)
-        .addVertex(v1)
-        .connectVertices(newIREdge(v0, v1, CommunicationPatternProperty.Value.OneToOne,
-            DataFlowProperty.Value.Pull))
-        .buildWithoutSourceSinkCheck());
+      .addVertex(v0)
+      .addVertex(v1)
+      .connectVertices(newIREdge(v0, v1, CommunicationPatternProperty.Value.OneToOne,
+        DataFlowProperty.Value.Pull))
+      .buildWithoutSourceSinkCheck());
 
     final DAG<Stage, StageEdge> stageDAG = physicalPlanGenerator.apply(irDAG);
     final Iterator<Stage> stages = stageDAG.getVertices().iterator();
diff --git a/runtime/common/src/test/java/org/apache/nemo/runtime/common/plan/StagePartitionerTest.java b/runtime/common/src/test/java/org/apache/nemo/runtime/common/plan/StagePartitionerTest.java
index 691e15b..7d04ee4 100644
--- a/runtime/common/src/test/java/org/apache/nemo/runtime/common/plan/StagePartitionerTest.java
+++ b/runtime/common/src/test/java/org/apache/nemo/runtime/common/plan/StagePartitionerTest.java
@@ -25,8 +25,10 @@
 import org.apache.nemo.common.ir.executionproperty.VertexExecutionProperty;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.vertex.OperatorVertex;
-import org.apache.nemo.common.ir.vertex.executionproperty.*;
+import org.apache.nemo.common.ir.vertex.executionproperty.IgnoreSchedulingTempDataReceiverProperty;
+import org.apache.nemo.common.ir.vertex.executionproperty.ParallelismProperty;
 import org.apache.nemo.common.ir.vertex.executionproperty.ResourcePriorityProperty;
+import org.apache.nemo.common.ir.vertex.executionproperty.ScheduleGroupProperty;
 import org.apache.reef.tang.exceptions.InjectionException;
 import org.junit.Before;
 import org.junit.Test;
@@ -53,8 +55,8 @@
   }
 
   /**
-   * @param parallelism {@link ParallelismProperty} value for the new vertex
-   * @param scheduleGroup {@link ScheduleGroupProperty} value for the new vertex
+   * @param parallelism     {@link ParallelismProperty} value for the new vertex
+   * @param scheduleGroup   {@link ScheduleGroupProperty} value for the new vertex
    * @param otherProperties other {@link VertexExecutionProperty} for the new vertex
    * @return new {@link IRVertex}
    */
@@ -135,7 +137,7 @@
   public void testSplitByOtherProperty() {
     final DAGBuilder<IRVertex, IREdge> dagBuilder = new DAGBuilder<>();
     final IRVertex v0 = newVertex(1, 0,
-        Arrays.asList(ResourcePriorityProperty.of(ResourcePriorityProperty.RESERVED)));
+      Arrays.asList(ResourcePriorityProperty.of(ResourcePriorityProperty.RESERVED)));
     final IRVertex v1 = newVertex(1, 0, Collections.emptyList());
     dagBuilder.addVertex(v0);
     dagBuilder.addVertex(v1);
@@ -151,7 +153,7 @@
   public void testNotSplitByIgnoredProperty() {
     final DAGBuilder<IRVertex, IREdge> dagBuilder = new DAGBuilder<>();
     final IRVertex v0 = newVertex(1, 0,
-        Arrays.asList(IgnoreSchedulingTempDataReceiverProperty.of()));
+      Arrays.asList(IgnoreSchedulingTempDataReceiverProperty.of()));
     final IRVertex v1 = newVertex(1, 0, Collections.emptyList());
     dagBuilder.addVertex(v0);
     dagBuilder.addVertex(v1);
diff --git a/runtime/driver/pom.xml b/runtime/driver/pom.xml
index 2707c3d..6735939 100644
--- a/runtime/driver/pom.xml
+++ b/runtime/driver/pom.xml
@@ -17,53 +17,54 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-runtime</artifactId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../</relativePath>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <parent>
+    <groupId>org.apache.nemo</groupId>
+    <artifactId>nemo-runtime</artifactId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../</relativePath>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
 
-    <artifactId>nemo-driver</artifactId>
-    <name>Nemo Driver</name>
+  <artifactId>nemo-driver</artifactId>
+  <name>Nemo Driver</name>
 
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-compiler-backend</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-compiler-optimizer</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-runtime-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-runtime-executor</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-runtime-master</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-conf</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-    </dependencies>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-compiler-backend</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-compiler-optimizer</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-runtime-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-runtime-executor</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-runtime-master</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-conf</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
 </project>
diff --git a/runtime/driver/src/main/java/org/apache/nemo/driver/NemoDriver.java b/runtime/driver/src/main/java/org/apache/nemo/driver/NemoDriver.java
index ed2b20e..3a981db 100644
--- a/runtime/driver/src/main/java/org/apache/nemo/driver/NemoDriver.java
+++ b/runtime/driver/src/main/java/org/apache/nemo/driver/NemoDriver.java
@@ -18,17 +18,17 @@
  */
 package org.apache.nemo.driver;
 
+import org.apache.commons.lang3.SerializationUtils;
+import org.apache.commons.lang3.concurrent.BasicThreadFactory;
 import org.apache.nemo.common.ir.IdManager;
 import org.apache.nemo.compiler.optimizer.pass.compiletime.annotating.ResourceSitePass;
 import org.apache.nemo.conf.JobConf;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.runtime.common.message.MessageParameters;
-import org.apache.nemo.runtime.master.ClientRPC;
 import org.apache.nemo.runtime.master.BroadcastManagerMaster;
+import org.apache.nemo.runtime.master.ClientRPC;
 import org.apache.nemo.runtime.master.RuntimeMaster;
-import org.apache.commons.lang3.SerializationUtils;
-import org.apache.commons.lang3.concurrent.BasicThreadFactory;
 import org.apache.reef.annotations.audience.DriverSide;
 import org.apache.reef.driver.client.JobMessageObserver;
 import org.apache.reef.driver.context.ActiveContext;
@@ -81,7 +81,7 @@
   private final ClientRPC clientRPC;
 
   private static ExecutorService runnerThread = Executors.newSingleThreadExecutor(
-      new BasicThreadFactory.Builder().namingPattern("User App thread-%d").build());
+    new BasicThreadFactory.Builder().namingPattern("User App thread-%d").build());
 
   // Client for sending log messages
   private final RemoteClientMessageLoggingHandler handler;
@@ -120,7 +120,7 @@
     clientRPC.registerHandler(ControlMessage.ClientToDriverMessageType.DriverShutdown, message -> shutdown());
     // Send DriverStarted message to the client
     clientRPC.send(ControlMessage.DriverToClientMessage.newBuilder()
-        .setType(ControlMessage.DriverToClientMessageType.DriverStarted).build());
+      .setType(ControlMessage.DriverToClientMessageType.DriverStarted).build());
   }
 
   /**
@@ -159,7 +159,7 @@
     public void onNext(final AllocatedEvaluator allocatedEvaluator) {
       final String executorId = RuntimeIdManager.generateExecutorId();
       runtimeMaster.onContainerAllocated(executorId, allocatedEvaluator,
-          getExecutorConfiguration(executorId));
+        getExecutorConfiguration(executorId));
     }
   }
 
@@ -173,7 +173,7 @@
 
       if (finalExecutorLaunched) {
         clientRPC.send(ControlMessage.DriverToClientMessage.newBuilder()
-            .setType(ControlMessage.DriverToClientMessageType.DriverReady).build());
+          .setType(ControlMessage.DriverToClientMessageType.DriverReady).build());
       }
     }
   }
@@ -181,14 +181,14 @@
   /**
    * Start to schedule a submitted user DAG.
    *
-   * @param dagString  the serialized DAG to schedule.
+   * @param dagString the serialized DAG to schedule.
    */
   private void startSchedulingUserDAG(final String dagString) {
     runnerThread.execute(() -> {
       userApplicationRunner.run(dagString);
       // send driver notification that user application is done.
       clientRPC.send(ControlMessage.DriverToClientMessage.newBuilder()
-          .setType(ControlMessage.DriverToClientMessageType.ExecutionDone).build());
+        .setType(ControlMessage.DriverToClientMessageType.ExecutionDone).build());
       // flush metrics
       runtimeMaster.flushMetrics();
     });
@@ -211,7 +211,7 @@
     @Override
     public void onNext(final FailedContext failedContext) {
       throw new RuntimeException(failedContext.getId() + " failed. See driver's log for the stack trace in executor.",
-          failedContext.asError());
+        failedContext.asError());
     }
   }
 
@@ -228,19 +228,19 @@
 
   private Configuration getExecutorConfiguration(final String executorId) {
     final Configuration executorConfiguration = JobConf.EXECUTOR_CONF
-        .set(JobConf.EXECUTOR_ID, executorId)
-        .set(JobConf.GLUSTER_DISK_DIRECTORY, glusterDirectory)
-        .set(JobConf.LOCAL_DISK_DIRECTORY, localDirectory)
-        .set(JobConf.JOB_ID, jobId)
-        .build();
+      .set(JobConf.EXECUTOR_ID, executorId)
+      .set(JobConf.GLUSTER_DISK_DIRECTORY, glusterDirectory)
+      .set(JobConf.LOCAL_DISK_DIRECTORY, localDirectory)
+      .set(JobConf.JOB_ID, jobId)
+      .build();
 
     final Configuration contextConfiguration = ContextConfiguration.CONF
-        .set(ContextConfiguration.IDENTIFIER, executorId) // We set: contextId = executorId
-        .set(ContextConfiguration.ON_CONTEXT_STARTED, NemoContext.ContextStartHandler.class)
-        .set(ContextConfiguration.ON_CONTEXT_STOP, NemoContext.ContextStopHandler.class)
-        .build();
+      .set(ContextConfiguration.IDENTIFIER, executorId) // We set: contextId = executorId
+      .set(ContextConfiguration.ON_CONTEXT_STARTED, NemoContext.ContextStartHandler.class)
+      .set(ContextConfiguration.ON_CONTEXT_STOP, NemoContext.ContextStopHandler.class)
+      .build();
 
-    final Configuration ncsConfiguration =  getExecutorNcsConfiguration();
+    final Configuration ncsConfiguration = getExecutorNcsConfiguration();
     final Configuration messageConfiguration = getExecutorMessageConfiguration(executorId);
 
     return Configurations.merge(executorConfiguration, contextConfiguration, ncsConfiguration, messageConfiguration);
@@ -248,15 +248,15 @@
 
   private Configuration getExecutorNcsConfiguration() {
     return Tang.Factory.getTang().newConfigurationBuilder()
-        .bindNamedParameter(NameResolverNameServerPort.class, Integer.toString(nameServer.getPort()))
-        .bindNamedParameter(NameResolverNameServerAddr.class, localAddressProvider.getLocalAddress())
-        .bindImplementation(IdentifierFactory.class, StringIdentifierFactory.class)
-        .build();
+      .bindNamedParameter(NameResolverNameServerPort.class, Integer.toString(nameServer.getPort()))
+      .bindNamedParameter(NameResolverNameServerAddr.class, localAddressProvider.getLocalAddress())
+      .bindImplementation(IdentifierFactory.class, StringIdentifierFactory.class)
+      .build();
   }
 
   private Configuration getExecutorMessageConfiguration(final String executorId) {
     return Tang.Factory.getTang().newConfigurationBuilder()
-        .bindNamedParameter(MessageParameters.SenderId.class, executorId)
-        .build();
+      .bindNamedParameter(MessageParameters.SenderId.class, executorId)
+      .build();
   }
 }
diff --git a/runtime/driver/src/main/java/org/apache/nemo/driver/UserApplicationRunner.java b/runtime/driver/src/main/java/org/apache/nemo/driver/UserApplicationRunner.java
index 8c6c9d3..20635e0 100644
--- a/runtime/driver/src/main/java/org/apache/nemo/driver/UserApplicationRunner.java
+++ b/runtime/driver/src/main/java/org/apache/nemo/driver/UserApplicationRunner.java
@@ -18,6 +18,7 @@
  */
 package org.apache.nemo.driver;
 
+import org.apache.commons.lang3.SerializationUtils;
 import org.apache.nemo.common.Pair;
 import org.apache.nemo.common.ir.IRDAG;
 import org.apache.nemo.compiler.backend.Backend;
@@ -28,7 +29,6 @@
 import org.apache.nemo.runtime.common.plan.PlanRewriter;
 import org.apache.nemo.runtime.master.PlanStateManager;
 import org.apache.nemo.runtime.master.RuntimeMaster;
-import org.apache.commons.lang3.SerializationUtils;
 import org.apache.reef.tang.annotations.Parameter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -82,7 +82,7 @@
 
       // Execute!
       final Pair<PlanStateManager, ScheduledExecutorService> executionResult =
-          runtimeMaster.execute(physicalPlan, maxScheduleAttempt);
+        runtimeMaster.execute(physicalPlan, maxScheduleAttempt);
       runtimeMaster.recordIRDAGMetrics(optimizedDAG, physicalPlan.getPlanId());
 
       // Wait for the job to finish and stop logging
diff --git a/runtime/executor/pom.xml b/runtime/executor/pom.xml
index c733dd5..de3ee44 100644
--- a/runtime/executor/pom.xml
+++ b/runtime/executor/pom.xml
@@ -17,62 +17,63 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-runtime</artifactId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../</relativePath>
-    </parent>
+  <parent>
+    <groupId>org.apache.nemo</groupId>
+    <artifactId>nemo-runtime</artifactId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../</relativePath>
+  </parent>
 
-    <artifactId>nemo-runtime-executor</artifactId>
-    <name>Nemo Runtime Executor</name>
+  <artifactId>nemo-runtime-executor</artifactId>
+  <name>Nemo Runtime Executor</name>
 
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-runtime-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-conf</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <!--Compression-->
-        <dependency>
-            <groupId>org.lz4</groupId>
-            <artifactId>lz4-java</artifactId>
-            <version>1.4.1</version>
-        </dependency>
-        <dependency>
-            <groupId>commons-io</groupId>
-            <artifactId>commons-io</artifactId>
-            <version>2.5</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-runtime-master</artifactId>
-            <version>0.2-SNAPSHOT</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <!--
-            This is needed to view the logs when running unit tests.
-            See https://dzone.com/articles/how-configure-slf4j-different for details.
-            -->
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-simple</artifactId>
-            <version>1.6.2</version>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-runtime-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-conf</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <!--Compression-->
+    <dependency>
+      <groupId>org.lz4</groupId>
+      <artifactId>lz4-java</artifactId>
+      <version>1.4.1</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>2.5</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-runtime-master</artifactId>
+      <version>0.2-SNAPSHOT</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <!--
+      This is needed to view the logs when running unit tests.
+      See https://dzone.com/articles/how-configure-slf4j-different for details.
+      -->
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-simple</artifactId>
+      <version>1.6.2</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
 </project>
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/Executor.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/Executor.java
index 0fd8cf1..4ef4c98 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/Executor.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/Executor.java
@@ -19,19 +19,21 @@
 package org.apache.nemo.runtime.executor;
 
 import com.google.protobuf.ByteString;
+import org.apache.commons.lang3.SerializationUtils;
+import org.apache.commons.lang3.concurrent.BasicThreadFactory;
 import org.apache.nemo.common.coder.BytesDecoderFactory;
 import org.apache.nemo.common.coder.BytesEncoderFactory;
 import org.apache.nemo.common.coder.DecoderFactory;
 import org.apache.nemo.common.coder.EncoderFactory;
 import org.apache.nemo.common.dag.DAG;
+import org.apache.nemo.common.exception.IllegalMessageException;
+import org.apache.nemo.common.exception.UnknownFailureCauseException;
+import org.apache.nemo.common.ir.edge.executionproperty.CompressionProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.DecoderProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.DecompressionProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.EncoderProperty;
-import org.apache.nemo.common.ir.edge.executionproperty.CompressionProperty;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.conf.JobConf;
-import org.apache.nemo.common.exception.IllegalMessageException;
-import org.apache.nemo.common.exception.UnknownFailureCauseException;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.runtime.common.message.MessageContext;
@@ -46,15 +48,13 @@
 import org.apache.nemo.runtime.executor.datatransfer.NemoEventDecoderFactory;
 import org.apache.nemo.runtime.executor.datatransfer.NemoEventEncoderFactory;
 import org.apache.nemo.runtime.executor.task.TaskExecutor;
-import org.apache.commons.lang3.SerializationUtils;
-import org.apache.commons.lang3.concurrent.BasicThreadFactory;
 import org.apache.reef.tang.annotations.Parameter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.inject.Inject;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * Executor.
@@ -95,8 +95,8 @@
                    final MetricManagerWorker metricMessageSender) {
     this.executorId = executorId;
     this.executorService = Executors.newCachedThreadPool(new BasicThreadFactory.Builder()
-        .namingPattern("TaskExecutor thread-%d")
-        .build());
+      .namingPattern("TaskExecutor thread-%d")
+      .build());
     this.persistentConnectionToMasterMap = persistentConnectionToMasterMap;
     this.serializerManager = serializerManager;
     this.intermediateDataIOFactory = intermediateDataIOFactory;
@@ -111,12 +111,13 @@
 
   private synchronized void onTaskReceived(final Task task) {
     LOG.debug("Executor [{}] received Task [{}] to execute.",
-        new Object[]{executorId, task.getTaskId()});
+      new Object[]{executorId, task.getTaskId()});
     executorService.execute(() -> launchTask(task));
   }
 
   /**
    * Launches the Task, and keeps track of the execution state with taskStateManager.
+   *
    * @param task to launch.
    */
   private void launchTask(final Task task) {
@@ -124,43 +125,43 @@
     try {
       final long deserializationStartTime = System.currentTimeMillis();
       final DAG<IRVertex, RuntimeEdge<IRVertex>> irDag =
-          SerializationUtils.deserialize(task.getSerializedIRDag());
+        SerializationUtils.deserialize(task.getSerializedIRDag());
       metricMessageSender.send("TaskMetric", task.getTaskId(), "taskDeserializationTime",
         SerializationUtils.serialize(System.currentTimeMillis() - deserializationStartTime));
       final TaskStateManager taskStateManager =
-          new TaskStateManager(task, executorId, persistentConnectionToMasterMap, metricMessageSender);
+        new TaskStateManager(task, executorId, persistentConnectionToMasterMap, metricMessageSender);
 
       task.getTaskIncomingEdges().forEach(e -> serializerManager.register(e.getId(),
-          getEncoderFactory(e.getPropertyValue(EncoderProperty.class).get()),
-          getDecoderFactory(e.getPropertyValue(DecoderProperty.class).get()),
-          e.getPropertyValue(CompressionProperty.class).orElse(null),
-          e.getPropertyValue(DecompressionProperty.class).orElse(null)));
+        getEncoderFactory(e.getPropertyValue(EncoderProperty.class).get()),
+        getDecoderFactory(e.getPropertyValue(DecoderProperty.class).get()),
+        e.getPropertyValue(CompressionProperty.class).orElse(null),
+        e.getPropertyValue(DecompressionProperty.class).orElse(null)));
       task.getTaskOutgoingEdges().forEach(e -> serializerManager.register(e.getId(),
-          getEncoderFactory(e.getPropertyValue(EncoderProperty.class).get()),
-          getDecoderFactory(e.getPropertyValue(DecoderProperty.class).get()),
-          e.getPropertyValue(CompressionProperty.class).orElse(null),
-          e.getPropertyValue(DecompressionProperty.class).orElse(null)));
+        getEncoderFactory(e.getPropertyValue(EncoderProperty.class).get()),
+        getDecoderFactory(e.getPropertyValue(DecoderProperty.class).get()),
+        e.getPropertyValue(CompressionProperty.class).orElse(null),
+        e.getPropertyValue(DecompressionProperty.class).orElse(null)));
       irDag.getVertices().forEach(v -> {
         irDag.getOutgoingEdgesOf(v).forEach(e -> serializerManager.register(e.getId(),
-            getEncoderFactory(e.getPropertyValue(EncoderProperty.class).get()),
-            getDecoderFactory(e.getPropertyValue(DecoderProperty.class).get()),
-            e.getPropertyValue(CompressionProperty.class).orElse(null),
-            e.getPropertyValue(DecompressionProperty.class).orElse(null)));
+          getEncoderFactory(e.getPropertyValue(EncoderProperty.class).get()),
+          getDecoderFactory(e.getPropertyValue(DecoderProperty.class).get()),
+          e.getPropertyValue(CompressionProperty.class).orElse(null),
+          e.getPropertyValue(DecompressionProperty.class).orElse(null)));
       });
 
       new TaskExecutor(task, irDag, taskStateManager, intermediateDataIOFactory, broadcastManagerWorker,
-          metricMessageSender, persistentConnectionToMasterMap).execute();
+        metricMessageSender, persistentConnectionToMasterMap).execute();
     } catch (final Exception e) {
       persistentConnectionToMasterMap.getMessageSender(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID).send(
-          ControlMessage.Message.newBuilder()
-              .setId(RuntimeIdManager.generateMessageId())
-              .setListenerId(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID)
-              .setType(ControlMessage.MessageType.ExecutorFailed)
-              .setExecutorFailedMsg(ControlMessage.ExecutorFailedMsg.newBuilder()
-                  .setExecutorId(executorId)
-                  .setException(ByteString.copyFrom(SerializationUtils.serialize(e)))
-                  .build())
-              .build());
+        ControlMessage.Message.newBuilder()
+          .setId(RuntimeIdManager.generateMessageId())
+          .setListenerId(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID)
+          .setType(ControlMessage.MessageType.ExecutorFailed)
+          .setExecutorFailedMsg(ControlMessage.ExecutorFailedMsg.newBuilder()
+            .setExecutorId(executorId)
+            .setException(ByteString.copyFrom(SerializationUtils.serialize(e)))
+            .build())
+          .build());
       throw e;
     }
   }
@@ -169,6 +170,7 @@
    * This wraps the encoder with NemoEventEncoder.
    * If the encoder is BytesEncoderFactory, we do not wrap the encoder.
    * TODO #276: Add NoCoder property value in Encoder/DecoderProperty
+   *
    * @param encoderFactory encoder factory
    * @return wrapped encoder
    */
@@ -184,6 +186,7 @@
    * This wraps the decoder with NemoEventDecoder.
    * If the decoder is BytesDecoderFactory, we do not wrap the decoder.
    * TODO #276: Add NoCoder property value in Encoder/DecoderProperty
+   *
    * @param decoderFactory decoder factory
    * @return wrapped decoder
    */
@@ -200,7 +203,7 @@
       metricMessageSender.close();
     } catch (final UnknownFailureCauseException e) {
       throw new UnknownFailureCauseException(
-          new Exception("Closing MetricManagerWorker failed in executor " + executorId));
+        new Exception("Closing MetricManagerWorker failed in executor " + executorId));
     }
   }
 
@@ -215,7 +218,7 @@
         case ScheduleTask:
           final ControlMessage.ScheduleTaskMsg scheduleTaskMsg = message.getScheduleTaskMsg();
           final Task task =
-              SerializationUtils.deserialize(scheduleTaskMsg.getTask().toByteArray());
+            SerializationUtils.deserialize(scheduleTaskMsg.getTask().toByteArray());
           onTaskReceived(task);
           break;
         case RequestMetricFlush:
@@ -223,15 +226,15 @@
           break;
         default:
           throw new IllegalMessageException(
-              new Exception("This message should not be received by an executor :" + message.getType()));
+            new Exception("This message should not be received by an executor :" + message.getType()));
       }
     }
 
     @Override
     public void onMessageWithContext(final ControlMessage.Message message, final MessageContext messageContext) {
       switch (message.getType()) {
-      default:
-        throw new IllegalMessageException(
+        default:
+          throw new IllegalMessageException(
             new Exception("This message should not be requested to an executor :" + message.getType()));
       }
     }
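The final hunk above is one of the few real indentation bugs in the commit: a `default:` label sat flush with the `switch` keyword and is pulled in to the case-label level. A before/after sketch with a hypothetical `kind` variable:

```java
// Before: the label sat at the same depth as the switch itself
switch (kind) {
default:
  throw new IllegalStateException("unexpected kind: " + kind);
}

// After: labels one level inside the switch, bodies one more
switch (kind) {
  default:
    throw new IllegalStateException("unexpected kind: " + kind);
}
```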
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/MetricManagerWorker.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/MetricManagerWorker.java
index 12ed335..9a71875 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/MetricManagerWorker.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/MetricManagerWorker.java
@@ -19,19 +19,18 @@
 package org.apache.nemo.runtime.executor;
 
 import com.google.protobuf.ByteString;
+import org.apache.nemo.common.exception.UnknownFailureCauseException;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.runtime.common.message.MessageEnvironment;
-import org.apache.nemo.common.exception.UnknownFailureCauseException;
 import org.apache.nemo.runtime.common.message.PersistentConnectionToMasterMap;
 import org.apache.reef.annotations.audience.EvaluatorSide;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.inject.Inject;
 import java.util.concurrent.*;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * Metric sender that periodically flushes the collected metrics to Driver.
  */
@@ -52,18 +51,18 @@
     this.persistentConnectionToMasterMap = persistentConnectionToMasterMap;
     final Runnable batchMetricMessages = () -> flushMetricMessageQueueToMaster();
     this.scheduledExecutorService.scheduleAtFixedRate(batchMetricMessages, 0,
-                                                      FLUSHING_PERIOD, TimeUnit.MILLISECONDS);
+      FLUSHING_PERIOD, TimeUnit.MILLISECONDS);
   }
 
   @Override
   public void flush() {
     flushMetricMessageQueueToMaster();
     persistentConnectionToMasterMap.getMessageSender(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID).send(
-        ControlMessage.Message.newBuilder()
-            .setId(RuntimeIdManager.generateMessageId())
-            .setListenerId(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID)
-            .setType(ControlMessage.MessageType.MetricFlushed)
-            .build());
+      ControlMessage.Message.newBuilder()
+        .setId(RuntimeIdManager.generateMessageId())
+        .setListenerId(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID)
+        .setType(ControlMessage.MessageType.MetricFlushed)
+        .build());
   }
 
   private synchronized void flushMetricMessageQueueToMaster() {
@@ -81,12 +80,12 @@
       }
 
       persistentConnectionToMasterMap.getMessageSender(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID).send(
-          ControlMessage.Message.newBuilder()
-              .setId(RuntimeIdManager.generateMessageId())
-              .setListenerId(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID)
-              .setType(ControlMessage.MessageType.MetricMessageReceived)
-              .setMetricMsg(metricMsgBuilder.build())
-              .build());
+        ControlMessage.Message.newBuilder()
+          .setId(RuntimeIdManager.generateMessageId())
+          .setListenerId(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID)
+          .setType(ControlMessage.MessageType.MetricMessageReceived)
+          .setMetricMsg(metricMsgBuilder.build())
+          .build());
     }
   }
 
@@ -94,12 +93,12 @@
   public void send(final String metricType, final String metricId,
                    final String metricField, final byte[] metricValue) {
     metricMessageQueue.add(
-        ControlMessage.Metric.newBuilder()
-            .setMetricType(metricType)
-            .setMetricId(metricId)
-            .setMetricField(metricField)
-            .setMetricValue(ByteString.copyFrom(metricValue))
-            .build());
+      ControlMessage.Metric.newBuilder()
+        .setMetricType(metricType)
+        .setMetricId(metricId)
+        .setMetricField(metricField)
+        .setMetricValue(ByteString.copyFrom(metricValue))
+        .build());
   }
 
   @Override
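
As context for reviewers, the re-indented `MetricManagerWorker` logic above follows a simple batch-and-flush pattern: producers enqueue metrics without blocking, and one scheduled task drains the queue at a fixed rate. Below is a minimal, self-contained sketch of that pattern; the `PeriodicFlusher` name and the plain `String` payload are illustrative stand-ins, not Nemo's `ControlMessage`-based API.

```java
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/** Sketch of the batch-and-flush idiom: producers enqueue, one scheduled task drains. */
final class PeriodicFlusher {
  private static final long FLUSHING_PERIOD_MS = 3000;
  private final Queue<String> metricMessageQueue = new ConcurrentLinkedQueue<>();
  private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();

  PeriodicFlusher() {
    // Flush immediately, then at a fixed rate, mirroring scheduleAtFixedRate in the constructor above.
    scheduler.scheduleAtFixedRate(this::flush, 0, FLUSHING_PERIOD_MS, TimeUnit.MILLISECONDS);
  }

  /** Non-blocking enqueue; actual transmission happens on the scheduler thread. */
  void send(final String metric) {
    metricMessageQueue.add(metric);
  }

  /** Drains everything currently queued into a single batch. */
  private synchronized void flush() {
    final StringBuilder batch = new StringBuilder();
    String metric;
    while ((metric = metricMessageQueue.poll()) != null) {
      batch.append(metric).append('\n');
    }
    if (batch.length() > 0) {
      System.out.print(batch); // stand-in for sending one batched message to the master
    }
  }

  void close() {
    flush(); // send whatever is left before shutting down
    scheduler.shutdown();
  }

  public static void main(final String[] args) throws InterruptedException {
    final PeriodicFlusher flusher = new PeriodicFlusher();
    flusher.send("TaskMetric|task-1|stateTransitionEvent");
    Thread.sleep(100); // give the scheduled task a chance to run
    flusher.close();
  }
}
```
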
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/MetricMessageSender.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/MetricMessageSender.java
index ede22d8..4c8de1a 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/MetricMessageSender.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/MetricMessageSender.java
@@ -28,8 +28,9 @@
 
   /**
    * Send metric to master.
-   * @param metricType type of the metric
-   * @param metricId id of the metric
+   *
+   * @param metricType  type of the metric
+   * @param metricId    id of the metric
    * @param metricField field of the metric
    * @param metricValue value of the metric which is serialized
    */
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/TaskStateManager.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/TaskStateManager.java
index ceae6e0..5a3eee5 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/TaskStateManager.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/TaskStateManager.java
@@ -18,6 +18,7 @@
  */
 package org.apache.nemo.runtime.executor;
 
+import org.apache.commons.lang3.SerializationUtils;
 import org.apache.nemo.common.exception.UnknownExecutionStateException;
 import org.apache.nemo.common.exception.UnknownFailureCauseException;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
@@ -26,15 +27,13 @@
 import org.apache.nemo.runtime.common.message.PersistentConnectionToMasterMap;
 import org.apache.nemo.runtime.common.metric.StateTransitionEvent;
 import org.apache.nemo.runtime.common.plan.Task;
-
-import java.util.*;
-
 import org.apache.nemo.runtime.common.state.TaskState;
-import org.apache.commons.lang3.SerializationUtils;
 import org.apache.reef.annotations.audience.EvaluatorSide;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.Optional;
+
 /**
  * Manages the states related to a task.
  * The methods of this class are synchronized.
@@ -60,24 +59,25 @@
     this.metricMessageSender = metricMessageSender;
 
     metricMessageSender.send("TaskMetric", taskId,
-        "containerId", SerializationUtils.serialize(executorId));
+      "containerId", SerializationUtils.serialize(executorId));
     metricMessageSender.send("TaskMetric", taskId,
-        "scheduleAttempt", SerializationUtils.serialize(attemptIdx));
+      "scheduleAttempt", SerializationUtils.serialize(attemptIdx));
   }
 
   /**
    * Updates the state of the task.
-   * @param newState of the task.
+   *
+   * @param newState        of the task.
    * @param vertexPutOnHold the vertex put on hold.
-   * @param cause only provided as non-empty upon recoverable failures.
+   * @param cause           only provided as non-empty upon recoverable failures.
    */
   public synchronized void onTaskStateChanged(final TaskState.State newState,
                                               final Optional<String> vertexPutOnHold,
                                               final Optional<TaskState.RecoverableTaskFailureCause> cause) {
     metricMessageSender.send("TaskMetric", taskId,
-        "stateTransitionEvent", SerializationUtils.serialize(new StateTransitionEvent<>(
-            System.currentTimeMillis(), null, newState
-        )));
+      "stateTransitionEvent", SerializationUtils.serialize(new StateTransitionEvent<>(
+        System.currentTimeMillis(), null, newState
+      )));
 
     switch (newState) {
       case EXECUTING:
@@ -106,19 +106,20 @@
 
   /**
    * Notifies the change in task state to master.
-   * @param newState of the task.
+   *
+   * @param newState        of the task.
    * @param vertexPutOnHold the vertex put on hold.
-   * @param cause only provided as non-empty upon recoverable failures.
+   * @param cause           only provided as non-empty upon recoverable failures.
    */
   private void notifyTaskStateToMaster(final TaskState.State newState,
                                        final Optional<String> vertexPutOnHold,
                                        final Optional<TaskState.RecoverableTaskFailureCause> cause) {
     final ControlMessage.TaskStateChangedMsg.Builder msgBuilder =
-        ControlMessage.TaskStateChangedMsg.newBuilder()
-            .setExecutorId(executorId)
-            .setTaskId(taskId)
-            .setAttemptIdx(attemptIdx)
-            .setState(convertState(newState));
+      ControlMessage.TaskStateChangedMsg.newBuilder()
+        .setExecutorId(executorId)
+        .setTaskId(taskId)
+        .setAttemptIdx(attemptIdx)
+        .setState(convertState(newState));
     if (vertexPutOnHold.isPresent()) {
       msgBuilder.setVertexPutOnHoldId(vertexPutOnHold.get());
     }
@@ -128,12 +129,12 @@
 
     // Send taskStateChangedMsg to master!
     persistentConnectionToMasterMap.getMessageSender(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID).send(
-        ControlMessage.Message.newBuilder()
-            .setId(RuntimeIdManager.generateMessageId())
-            .setListenerId(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID)
-            .setType(ControlMessage.MessageType.TaskStateChanged)
-            .setTaskStateChangedMsg(msgBuilder.build())
-            .build());
+      ControlMessage.Message.newBuilder()
+        .setId(RuntimeIdManager.generateMessageId())
+        .setListenerId(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID)
+        .setType(ControlMessage.MessageType.TaskStateChanged)
+        .setTaskStateChangedMsg(msgBuilder.build())
+        .build());
   }
 
   private ControlMessage.TaskStateFromExecutor convertState(final TaskState.State state) {
@@ -156,7 +157,7 @@
   }
 
   private ControlMessage.RecoverableFailureCause convertFailureCause(
-      final TaskState.RecoverableTaskFailureCause cause) {
+    final TaskState.RecoverableTaskFailureCause cause) {
     switch (cause) {
       case INPUT_READ_FAILURE:
         return ControlMessage.RecoverableFailureCause.InputReadFailure;
@@ -164,7 +165,7 @@
         return ControlMessage.RecoverableFailureCause.OutputWriteFailure;
       default:
         throw new UnknownFailureCauseException(
-            new Throwable("The failure cause for the recoverable failure is unknown"));
+          new Throwable("The failure cause for the recoverable failure is unknown"));
     }
   }
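
The `convertState`/`convertFailureCause` methods touched above use the usual exhaustive-switch idiom for mapping a runtime enum onto its protobuf counterpart. A minimal sketch of the idiom follows; the enum names and the `IllegalArgumentException` are illustrative stand-ins for the actual `ControlMessage` types and `UnknownFailureCauseException`.

```java
/** Sketch of the exhaustive-switch mapping idiom; names here are illustrative. */
final class CauseConverter {
  enum RecoverableCause { INPUT_READ_FAILURE, OUTPUT_WRITE_FAILURE }
  enum ProtoCause { InputReadFailure, OutputWriteFailure }

  static ProtoCause convert(final RecoverableCause cause) {
    switch (cause) {
      case INPUT_READ_FAILURE:
        return ProtoCause.InputReadFailure;
      case OUTPUT_WRITE_FAILURE:
        return ProtoCause.OutputWriteFailure;
      default:
        // Unreachable today; guards against enum values added later.
        throw new IllegalArgumentException("Unknown failure cause: " + cause);
    }
  }

  public static void main(final String[] args) {
    System.out.println(convert(RecoverableCause.INPUT_READ_FAILURE)); // prints InputReadFailure
  }
}
```
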
 
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/TransformContextImpl.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/TransformContextImpl.java
index 4dd105d..c2556dd 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/TransformContextImpl.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/TransformContextImpl.java
@@ -33,6 +33,7 @@
 
   /**
    * Constructor of Context Implementation.
+   *
    * @param broadcastManagerWorker for broadcast variables.
    */
   public TransformContextImpl(final BroadcastManagerWorker broadcastManagerWorker) {
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteInputContext.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteInputContext.java
index 439decd..30c7304 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteInputContext.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteInputContext.java
@@ -71,10 +71,11 @@
 
   /**
    * Creates an input context.
-   * @param remoteExecutorId    id of the remote executor
-   * @param contextId           identifier for this context
-   * @param contextDescriptor   user-provided context descriptor
-   * @param contextManager      {@link ContextManager} for the channel
+   *
+   * @param remoteExecutorId  id of the remote executor
+   * @param contextId         identifier for this context
+   * @param contextDescriptor user-provided context descriptor
+   * @param contextManager    {@link ContextManager} for the channel
    */
   ByteInputContext(final String remoteExecutorId,
                    final ContextId contextId,
@@ -86,6 +87,7 @@
   /**
    * Returns {@link Iterator} of {@link InputStream}s.
    * This method always returns the same {@link Iterator} instance.
+   *
    * @return {@link Iterator} of {@link InputStream}s.
    */
   public Iterator<InputStream> getInputStreams() {
@@ -94,6 +96,7 @@
 
   /**
    * Returns a future, which is completed when the corresponding transfer for this context gets done.
+   *
    * @return a {@link CompletableFuture} for the same value that {@link #getInputStreams()} returns
    */
   public CompletableFuture<Iterator<InputStream>> getCompletedFuture() {
@@ -113,6 +116,7 @@
 
   /**
    * Called when {@link ByteBuf} is supplied to this context.
+   *
    * @param byteBuf the {@link ByteBuf} to supply
    */
   void onByteBuf(final ByteBuf byteBuf) {
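
Beyond the javadoc fixes, it is worth noting that `ByteInputContext` exposes the same data two ways: an `Iterator` handed out eagerly and a `CompletableFuture` completed when the transfer finishes. Here is a minimal sketch of that shape, using `String` elements in place of `InputStream`s; `InputHandle` is a hypothetical name, not Nemo's class.

```java
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CompletableFuture;

/** Sketch of the two read paths: an eager iterator plus a completion future. */
final class InputHandle {
  private final Iterator<String> inputs;
  private final CompletableFuture<Iterator<String>> completedFuture = new CompletableFuture<>();

  InputHandle(final List<String> buffered) {
    this.inputs = buffered.iterator();
  }

  /** Always returns the same instance, as getInputStreams() does. */
  Iterator<String> getInputs() {
    return inputs;
  }

  CompletableFuture<Iterator<String>> getCompletedFuture() {
    return completedFuture;
  }

  /** Called once the whole transfer is done; completes with the same iterator. */
  void onTransferDone() {
    completedFuture.complete(inputs);
  }

  public static void main(final String[] args) {
    final InputHandle handle = new InputHandle(List.of("a", "b"));
    handle.getCompletedFuture().thenAccept(it -> System.out.println("first element: " + it.next()));
    handle.onTransferDone(); // triggers the callback above, printing "first element: a"
  }
}
```
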
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteOutputContext.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteOutputContext.java
index 12761b2..019358a 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteOutputContext.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteOutputContext.java
@@ -18,13 +18,15 @@
  */
 package org.apache.nemo.runtime.executor.bytetransfer;
 
+import io.netty.buffer.ByteBuf;
 import io.netty.buffer.ByteBufOutputStream;
+import io.netty.channel.Channel;
+import io.netty.channel.DefaultFileRegion;
+import io.netty.channel.FileRegion;
 import org.apache.nemo.common.coder.EncoderFactory;
 import org.apache.nemo.runtime.executor.data.DataUtil;
 import org.apache.nemo.runtime.executor.data.FileArea;
 import org.apache.nemo.runtime.executor.data.partition.SerializedPartition;
-import io.netty.buffer.ByteBuf;
-import io.netty.channel.*;
 import org.apache.nemo.runtime.executor.data.streamchainer.Serializer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -53,10 +55,10 @@
   /**
    * Creates an output context.
    *
-   * @param remoteExecutorId    id of the remote executor
-   * @param contextId           identifier for this context
-   * @param contextDescriptor   user-provided context descriptor
-   * @param contextManager      {@link ContextManager} for the channel
+   * @param remoteExecutorId  id of the remote executor
+   * @param contextId         identifier for this context
+   * @param contextDescriptor user-provided context descriptor
+   * @param contextManager    {@link ContextManager} for the channel
    */
   ByteOutputContext(final String remoteExecutorId,
                     final ContextId contextId,
@@ -68,6 +70,7 @@
 
   /**
   * Closes the existing sub-stream (if any) and creates a new sub-stream.
+   *
    * @return new {@link ByteOutputStream}
    * @throws IOException if an exception was set or this context was closed.
    */
@@ -149,6 +152,7 @@
 
     /**
      * Writes {@link SerializedPartition}.
+     *
      * @param serializedPartition {@link SerializedPartition} to write.
      * @return {@code this}
      * @throws IOException when an exception has been set or this stream was closed
@@ -194,6 +198,7 @@
 
     /**
      * Writes a data frame, from {@link ByteBuf}.
+     *
      * @param byteBuf {@link ByteBuf} to write.
      */
     private void writeByteBuf(final ByteBuf byteBuf) throws IOException {
@@ -204,7 +209,8 @@
 
     /**
      * Write an element to the channel.
-     * @param element element
+     *
+     * @param element    element
      * @param serializer serializer
      */
     public void writeElement(final Object element,
@@ -226,8 +232,9 @@
 
     /**
      * Writes a data frame.
-     * @param body        the body or {@code null}
-     * @param length      the length of the body, in bytes
+     *
+     * @param body   the body or {@code null}
+     * @param length the length of the body, in bytes
      * @throws IOException when an exception has been set or this stream was closed
      */
     private void writeDataFrame(final Object body, final long length) throws IOException {
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransfer.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransfer.java
index ef49c36..806b0a9 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransfer.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransfer.java
@@ -18,7 +18,8 @@
  */
 package org.apache.nemo.runtime.executor.bytetransfer;
 
-import io.netty.channel.*;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelFuture;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -41,6 +42,7 @@
 
   /**
    * Creates a byte transfer.
+   *
    * @param byteTransport provides channels to other executors
    */
   @Inject
@@ -50,6 +52,7 @@
 
   /**
    * Initiate a transfer context to receive data.
+   *
    * @param executorId        the id of the remote executor
    * @param contextDescriptor user-provided descriptor for the new context
    * @param isPipe            is pipe
@@ -63,8 +66,9 @@
 
   /**
    * Initiate a transfer context to send data.
-   * @param executorId         the id of the remote executor
-   * @param contextDescriptor  user-provided descriptor for the new context
+   *
+   * @param executorId        the id of the remote executor
+   * @param contextDescriptor user-provided descriptor for the new context
    * @param isPipe            is pipe
    * @return a {@link ByteOutputContext} to which data can be written
    */
@@ -84,7 +88,7 @@
     try {
       channelFuture = executorIdToChannelFutureMap.compute(remoteExecutorId, (executorId, cachedChannelFuture) -> {
         if (cachedChannelFuture != null
-            && (cachedChannelFuture.channel().isOpen() || cachedChannelFuture.channel().isActive())) {
+          && (cachedChannelFuture.channel().isOpen() || cachedChannelFuture.channel().isActive())) {
           return cachedChannelFuture;
         } else {
           final ChannelFuture future = byteTransport.connectTo(executorId);
@@ -109,8 +113,9 @@
 
   /**
    * Called when a remote executor initiates new transfer context.
-   * @param remoteExecutorId  id of the remote executor
-   * @param channel           the corresponding {@link Channel}.
+   *
+   * @param remoteExecutorId id of the remote executor
+   * @param channel          the corresponding {@link Channel}.
    */
   void onNewContextByRemoteExecutor(final String remoteExecutorId, final Channel channel) {
     executorIdToChannelFutureMap.compute(remoteExecutorId, (executorId, cachedChannelFuture) -> {
@@ -121,7 +126,7 @@
         return cachedChannelFuture;
       } else {
         LOG.warn("Duplicate channel for remote {}({}) and this executor",
-            new Object[]{executorId, channel.remoteAddress()});
+          new Object[]{executorId, channel.remoteAddress()});
         return channel.newSucceededFuture();
       }
     });
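
The `compute()` calls re-indented above do real work: they atomically reuse a healthy cached channel or dial a new one, without racing concurrent callers. Below is a minimal sketch of the idiom, with a toy `Connection` class standing in for Netty's `ChannelFuture`.

```java
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

/** Sketch of the connection-caching idiom in ByteTransfer#connectTo. */
final class ConnectionCache {
  static final class Connection {
    final String target;
    volatile boolean open = true;
    Connection(final String target) { this.target = target; }
  }

  private final ConcurrentMap<String, Connection> cache = new ConcurrentHashMap<>();

  Connection connectTo(final String executorId) {
    // compute() runs the remapping function atomically for this key.
    return cache.compute(executorId, (id, cached) -> {
      if (cached != null && cached.open) {
        return cached;             // reuse the healthy cached connection
      }
      return new Connection(id);   // otherwise dial a fresh one
    });
  }

  public static void main(final String[] args) {
    final ConnectionCache cache = new ConnectionCache();
    final Connection first = cache.connectTo("executor-1");
    first.open = false;            // simulate the channel going down
    final Connection second = cache.connectTo("executor-1");
    System.out.println(first != second); // true: the stale entry was replaced
  }
}
```
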
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransferContext.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransferContext.java
index 55226f3..e2c7adf 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransferContext.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransferContext.java
@@ -18,9 +18,9 @@
  */
 package org.apache.nemo.runtime.executor.bytetransfer;
 
-import org.apache.nemo.runtime.common.comm.ControlMessage.ByteTransferDataDirection;
 import io.netty.channel.ChannelFuture;
 import io.netty.channel.ChannelFutureListener;
+import org.apache.nemo.runtime.common.comm.ControlMessage.ByteTransferDataDirection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -45,10 +45,11 @@
 
   /**
    * Creates a transfer context.
-   * @param remoteExecutorId    id of the remote executor
-   * @param contextId           identifier for this context
-   * @param contextDescriptor   user-provided context descriptor
-   * @param contextManager      to de-register context when this context expires
+   *
+   * @param remoteExecutorId  id of the remote executor
+   * @param contextId         identifier for this context
+   * @param contextDescriptor user-provided context descriptor
+   * @param contextManager    to de-register context when this context expires
    */
   ByteTransferContext(final String remoteExecutorId,
                       final ContextId contextId,
@@ -82,14 +83,14 @@
   }
 
   /**
-   * @return  Whether this context has exception or not.
+   * @return Whether this context has an exception or not.
    */
   public final boolean hasException() {
     return hasException;
   }
 
   /**
-   * @return  The exception involved with this context, or {@code null}.
+   * @return The exception involved with this context, or {@code null}.
    */
   public final Throwable getException() {
     return exception;
@@ -109,12 +110,14 @@
 
   /**
    * Handles exception.
+   *
    * @param cause the cause of exception handling
    */
   public abstract void onChannelError(@Nullable final Throwable cause);
 
   /**
    * Sets exception.
+   *
    * @param cause the exception to set
    */
   protected final void setChannelError(@Nullable final Throwable cause) {
@@ -145,6 +148,7 @@
 
     /**
      * Create {@link ContextId}.
+     *
      * @param initiatorExecutorId id of the executor who initiated this context and issued context id
      * @param partnerExecutorId   the other executor
      * @param dataDirection       the direction of the data flow
@@ -202,9 +206,9 @@
       }
       final ContextId contextId = (ContextId) other;
       return transferIndex == contextId.transferIndex
-          && Objects.equals(initiatorExecutorId, contextId.initiatorExecutorId)
-          && Objects.equals(partnerExecutorId, contextId.partnerExecutorId)
-          && dataDirection == contextId.dataDirection;
+        && Objects.equals(initiatorExecutorId, contextId.initiatorExecutorId)
+        && Objects.equals(partnerExecutorId, contextId.partnerExecutorId)
+        && dataDirection == contextId.dataDirection;
     }
 
     @Override
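
For reference, the `ContextId.equals` re-indented above follows the standard contract where every field compared in `equals` also feeds `hashCode`. A minimal sketch of that pairing follows; the `TransferKey` name and its fields are illustrative, not the exact `ContextId` layout.

```java
import java.util.Objects;

/** Sketch of the equals/hashCode contract behind a composite identifier. */
final class TransferKey {
  private final String initiatorExecutorId;
  private final String partnerExecutorId;
  private final int transferIndex;

  TransferKey(final String initiatorExecutorId, final String partnerExecutorId, final int transferIndex) {
    this.initiatorExecutorId = initiatorExecutorId;
    this.partnerExecutorId = partnerExecutorId;
    this.transferIndex = transferIndex;
  }

  @Override
  public boolean equals(final Object other) {
    if (this == other) {
      return true;
    }
    if (!(other instanceof TransferKey)) {
      return false;
    }
    final TransferKey that = (TransferKey) other;
    return transferIndex == that.transferIndex
      && Objects.equals(initiatorExecutorId, that.initiatorExecutorId)
      && Objects.equals(partnerExecutorId, that.partnerExecutorId);
  }

  @Override
  public int hashCode() {
    // Every field used in equals() participates here, keeping the contract intact.
    return Objects.hash(initiatorExecutorId, partnerExecutorId, transferIndex);
  }
}
```
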
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransport.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransport.java
index c535fad..8c0d629 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransport.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransport.java
@@ -18,17 +18,20 @@
  */
 package org.apache.nemo.runtime.executor.bytetransfer;
 
-import org.apache.nemo.conf.JobConf;
-import org.apache.nemo.runtime.common.NettyChannelImplementationSelector;
 import io.netty.bootstrap.Bootstrap;
 import io.netty.bootstrap.ServerBootstrap;
-import io.netty.channel.*;
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelFuture;
+import io.netty.channel.ChannelOption;
+import io.netty.channel.EventLoopGroup;
 import io.netty.channel.group.ChannelGroup;
 import io.netty.channel.group.ChannelGroupFuture;
 import io.netty.channel.group.DefaultChannelGroup;
 import io.netty.util.concurrent.DefaultThreadFactory;
 import io.netty.util.concurrent.Future;
 import io.netty.util.concurrent.GlobalEventExecutor;
+import org.apache.nemo.conf.JobConf;
+import org.apache.nemo.runtime.common.NettyChannelImplementationSelector;
 import org.apache.reef.io.network.naming.NameResolver;
 import org.apache.reef.tang.annotations.Parameter;
 import org.apache.reef.wake.Identifier;
@@ -61,31 +64,32 @@
 
   /**
    * Constructs a byte transport and starts listening.
-   * @param nameResolver          provides naming registry
-   * @param localExecutorId       the id of this executor
-   * @param channelImplSelector   provides implementation for netty channel
-   * @param channelInitializer    initializes channel pipeline
-   * @param tcpPortProvider       provides an iterator of random tcp ports
-   * @param localAddressProvider  provides the local address of the node to bind to
-   * @param port                  the listening port; 0 means random assign using {@code tcpPortProvider}
-   * @param serverBacklog         the maximum number of pending connections to the server
-   * @param numListeningThreads   the number of listening threads of the server
-   * @param numWorkingThreads     the number of working threads of the server
-   * @param numClientThreads      the number of client threads
+   *
+   * @param nameResolver         provides naming registry
+   * @param localExecutorId      the id of this executor
+   * @param channelImplSelector  provides implementation for netty channel
+   * @param channelInitializer   initializes channel pipeline
+   * @param tcpPortProvider      provides an iterator of random tcp ports
+   * @param localAddressProvider provides the local address of the node to bind to
+   * @param port                 the listening port; 0 means random assign using {@code tcpPortProvider}
+   * @param serverBacklog        the maximum number of pending connections to the server
+   * @param numListeningThreads  the number of listening threads of the server
+   * @param numWorkingThreads    the number of working threads of the server
+   * @param numClientThreads     the number of client threads
    */
   @Inject
   private ByteTransport(
-      final NameResolver nameResolver,
-      @Parameter(JobConf.ExecutorId.class) final String localExecutorId,
-      final NettyChannelImplementationSelector channelImplSelector,
-      final ByteTransportChannelInitializer channelInitializer,
-      final TcpPortProvider tcpPortProvider,
-      final LocalAddressProvider localAddressProvider,
-      @Parameter(JobConf.PartitionTransportServerPort.class) final int port,
-      @Parameter(JobConf.PartitionTransportServerBacklog.class) final int serverBacklog,
-      @Parameter(JobConf.PartitionTransportServerNumListeningThreads.class) final int numListeningThreads,
-      @Parameter(JobConf.PartitionTransportServerNumWorkingThreads.class) final int numWorkingThreads,
-      @Parameter(JobConf.PartitionTransportClientNumThreads.class) final int numClientThreads) {
+    final NameResolver nameResolver,
+    @Parameter(JobConf.ExecutorId.class) final String localExecutorId,
+    final NettyChannelImplementationSelector channelImplSelector,
+    final ByteTransportChannelInitializer channelInitializer,
+    final TcpPortProvider tcpPortProvider,
+    final LocalAddressProvider localAddressProvider,
+    @Parameter(JobConf.PartitionTransportServerPort.class) final int port,
+    @Parameter(JobConf.PartitionTransportServerBacklog.class) final int serverBacklog,
+    @Parameter(JobConf.PartitionTransportServerNumListeningThreads.class) final int numListeningThreads,
+    @Parameter(JobConf.PartitionTransportServerNumWorkingThreads.class) final int numWorkingThreads,
+    @Parameter(JobConf.PartitionTransportClientNumThreads.class) final int numClientThreads) {
 
     this.nameResolver = nameResolver;
 
@@ -96,23 +100,23 @@
     final String host = localAddressProvider.getLocalAddress();
 
     serverListeningGroup = channelImplSelector.newEventLoopGroup(numListeningThreads,
-        new DefaultThreadFactory(SERVER_LISTENING));
+      new DefaultThreadFactory(SERVER_LISTENING));
     serverWorkingGroup = channelImplSelector.newEventLoopGroup(numWorkingThreads,
-        new DefaultThreadFactory(SERVER_WORKING));
+      new DefaultThreadFactory(SERVER_WORKING));
     clientGroup = channelImplSelector.newEventLoopGroup(numClientThreads, new DefaultThreadFactory(CLIENT));
 
     clientBootstrap = new Bootstrap()
-        .group(clientGroup)
-        .channel(channelImplSelector.getChannelClass())
-        .handler(channelInitializer)
-        .option(ChannelOption.SO_REUSEADDR, true);
+      .group(clientGroup)
+      .channel(channelImplSelector.getChannelClass())
+      .handler(channelInitializer)
+      .option(ChannelOption.SO_REUSEADDR, true);
 
     final ServerBootstrap serverBootstrap = new ServerBootstrap()
-        .group(serverListeningGroup, serverWorkingGroup)
-        .channel(channelImplSelector.getServerChannelClass())
-        .childHandler(channelInitializer)
-        .option(ChannelOption.SO_BACKLOG, serverBacklog)
-        .option(ChannelOption.SO_REUSEADDR, true);
+      .group(serverListeningGroup, serverWorkingGroup)
+      .channel(channelImplSelector.getServerChannelClass())
+      .childHandler(channelInitializer)
+      .option(ChannelOption.SO_BACKLOG, serverBacklog)
+      .option(ChannelOption.SO_REUSEADDR, true);
 
     Channel listeningChannel = null;
     if (port == 0) {
@@ -193,7 +197,8 @@
 
   /**
    * Connect to the {@link ByteTransport} server of the specified executor.
-   * @param remoteExecutorId  the id of the executor
+   *
+   * @param remoteExecutorId the id of the executor
    * @return a {@link ChannelFuture} for connecting
    */
   ChannelFuture connectTo(final String remoteExecutorId) {
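
The constructor above wires two event-loop groups into a `ServerBootstrap`: one accepts connections, the other serves the accepted channels. Here is a minimal, runnable sketch of that split, using the plain NIO transport rather than Nemo's `NettyChannelImplementationSelector`; the backlog value and empty pipeline are illustrative.

```java
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;

/** Sketch of the listening/working group split configured in the ByteTransport constructor. */
final class SketchServer {
  public static void main(final String[] args) throws InterruptedException {
    final EventLoopGroup listeningGroup = new NioEventLoopGroup(1); // accepts incoming connections
    final EventLoopGroup workingGroup = new NioEventLoopGroup();    // handles accepted channels
    try {
      final ServerBootstrap serverBootstrap = new ServerBootstrap()
        .group(listeningGroup, workingGroup)
        .channel(NioServerSocketChannel.class)
        .childHandler(new ChannelInitializer<SocketChannel>() {
          @Override
          protected void initChannel(final SocketChannel ch) {
            // pipeline handlers would be added here
          }
        })
        .option(ChannelOption.SO_BACKLOG, 128)
        .option(ChannelOption.SO_REUSEADDR, true);
      final ChannelFuture bound = serverBootstrap.bind(0).sync(); // port 0: let the OS pick a free port
      System.out.println("Listening on " + bound.channel().localAddress());
      bound.channel().close().sync();
    } finally {
      listeningGroup.shutdownGracefully();
      workingGroup.shutdownGracefully();
    }
  }
}
```
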
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransportChannelInitializer.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransportChannelInitializer.java
index ee80527..ac5a0ae 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransportChannelInitializer.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransportChannelInitializer.java
@@ -18,10 +18,10 @@
  */
 package org.apache.nemo.runtime.executor.bytetransfer;
 
-import org.apache.nemo.conf.JobConf;
-import org.apache.nemo.runtime.executor.data.BlockManagerWorker;
 import io.netty.channel.ChannelInitializer;
 import io.netty.channel.socket.SocketChannel;
+import org.apache.nemo.conf.JobConf;
+import org.apache.nemo.runtime.executor.data.BlockManagerWorker;
 import org.apache.nemo.runtime.executor.data.PipeManagerWorker;
 import org.apache.reef.tang.InjectionFuture;
 import org.apache.reef.tang.annotations.Parameter;
@@ -101,12 +101,12 @@
     final ContextManager contextManager = new ContextManager(pipeManagerWorker.get(), blockManagerWorker.get(),
       byteTransfer.get(), byteTransport.get().getChannelGroup(), localExecutorId, ch);
     ch.pipeline()
-        // inbound
-        .addLast(new FrameDecoder(contextManager))
-        // outbound
-        .addLast(controlFrameEncoder)
-        .addLast(dataFrameEncoder)
-        // inbound
-        .addLast(contextManager);
+      // inbound
+      .addLast(new FrameDecoder(contextManager))
+      // outbound
+      .addLast(controlFrameEncoder)
+      .addLast(dataFrameEncoder)
+      // inbound
+      .addLast(contextManager);
   }
 }
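
The pipeline assembly above relies on `addLast()` ordering: inbound handlers see bytes in top-to-bottom order, outbound handlers in bottom-to-top order, which is why the inbound/outbound comments matter. A minimal sketch with off-the-shelf codecs follows; the length-prefixed `String` framing is illustrative, not Nemo's actual frame format.

```java
import io.netty.channel.ChannelInitializer;
import io.netty.channel.socket.SocketChannel;
import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
import io.netty.handler.codec.LengthFieldPrepender;
import io.netty.handler.codec.string.StringDecoder;
import io.netty.handler.codec.string.StringEncoder;

/** Sketch of inbound/outbound handler ordering in a shared pipeline. */
final class SketchChannelInitializer extends ChannelInitializer<SocketChannel> {
  @Override
  protected void initChannel(final SocketChannel ch) {
    ch.pipeline()
      // inbound: split the raw byte stream into length-prefixed frames
      .addLast(new LengthFieldBasedFrameDecoder(1048576, 0, 4, 0, 4))
      // outbound: prepend the 4-byte length header on every write
      .addLast(new LengthFieldPrepender(4))
      // inbound: frame -> String; outbound: String -> frame
      .addLast(new StringDecoder())
      .addLast(new StringEncoder());
  }
}
```
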
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ClosableBlockingQueue.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ClosableBlockingQueue.java
index 3d992e7..7bc8855 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ClosableBlockingQueue.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ClosableBlockingQueue.java
@@ -56,7 +56,7 @@
    *
    * @param element the element to add
    * @throws IllegalStateException if the input end of this queue has been closed
-   * @throws NullPointerException if {@code element} is {@code null}
+   * @throws NullPointerException  if {@code element} is {@code null}
    */
   public synchronized void put(final T element) {
     if (element == null) {
@@ -80,6 +80,7 @@
 
   /**
    * Mark the input end of this queue as closed.
+   *
    * @param throwableToSet a throwable to set as the cause
    */
   public synchronized void closeExceptionally(final Throwable throwableToSet) {
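
`ClosableBlockingQueue` is a small but subtle structure: a consumer must block while the queue is empty and open, then drain the remaining elements and observe end-of-input once the input end is closed. A minimal sketch of that contract using intrinsic monitors; `MiniClosableQueue` and the null end-of-input marker are illustrative choices, not Nemo's exact API.

```java
import java.util.ArrayDeque;
import java.util.Queue;

/** Sketch of a closable blocking queue: take() blocks until input arrives or the queue closes. */
final class MiniClosableQueue<T> {
  private final Queue<T> elements = new ArrayDeque<>();
  private boolean closed = false;

  synchronized void put(final T element) {
    if (closed) {
      throw new IllegalStateException("The input end of this queue has been closed");
    }
    elements.add(element);
    notifyAll(); // wake up any take() waiting for input
  }

  synchronized void close() {
    closed = true;
    notifyAll(); // wake up waiters so they can observe the close
  }

  /** @return the next element, or {@code null} once the queue is closed and drained. */
  synchronized T take() throws InterruptedException {
    while (elements.isEmpty() && !closed) {
      wait();
    }
    return elements.poll(); // null only when closed and empty
  }

  public static void main(final String[] args) throws InterruptedException {
    final MiniClosableQueue<String> queue = new MiniClosableQueue<>();
    queue.put("a");
    queue.close();
    System.out.println(queue.take()); // a
    System.out.println(queue.take()); // null: closed and drained
  }
}
```
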
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ContextManager.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ContextManager.java
index c694657..bfbbda9 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ContextManager.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ContextManager.java
@@ -18,12 +18,14 @@
  */
 package org.apache.nemo.runtime.executor.bytetransfer;
 
+import io.netty.channel.Channel;
+import io.netty.channel.ChannelHandlerContext;
+import io.netty.channel.SimpleChannelInboundHandler;
+import io.netty.channel.group.ChannelGroup;
 import org.apache.nemo.runtime.common.comm.ControlMessage.ByteTransferContextSetupMessage;
 import org.apache.nemo.runtime.common.comm.ControlMessage.ByteTransferDataDirection;
 import org.apache.nemo.runtime.executor.bytetransfer.ByteTransferContext.ContextId;
 import org.apache.nemo.runtime.executor.data.BlockManagerWorker;
-import io.netty.channel.*;
-import io.netty.channel.group.ChannelGroup;
 import org.apache.nemo.runtime.executor.data.PipeManagerWorker;
 
 import java.util.concurrent.ConcurrentHashMap;
@@ -53,12 +55,13 @@
 
   /**
    * Creates context manager for this channel.
-   * @param pipeManagerWorker   provides handler for new contexts by remote executors
-   * @param blockManagerWorker  provides handler for new contexts by remote executors
-   * @param byteTransfer        provides channel caching
-   * @param channelGroup        to cleanup this channel when closing {@link ByteTransport}
-   * @param localExecutorId     local executor id
-   * @param channel             the {@link Channel} to manage
+   *
+   * @param pipeManagerWorker  provides handler for new contexts by remote executors
+   * @param blockManagerWorker provides handler for new contexts by remote executors
+   * @param byteTransfer       provides channel caching
+   * @param channelGroup       to cleanup this channel when closing {@link ByteTransport}
+   * @param localExecutorId    local executor id
+   * @param channel            the {@link Channel} to manage
    */
   ContextManager(final PipeManagerWorker pipeManagerWorker,
                  final BlockManagerWorker blockManagerWorker,
@@ -83,6 +86,7 @@
 
   /**
    * Returns {@link ByteInputContext} to provide {@link io.netty.buffer.ByteBuf}s on.
+   *
    * @param dataDirection the data direction
    * @param transferIndex transfer index
    * @return the {@link ByteInputContext} corresponding to the pair of {@code dataDirection} and {@code transferIndex}
@@ -90,20 +94,21 @@
   ByteInputContext getInputContext(final ByteTransferDataDirection dataDirection,
                                    final int transferIndex) {
     final ConcurrentMap<Integer, ByteInputContext> contexts =
-        dataDirection == ByteTransferDataDirection.INITIATOR_SENDS_DATA
-            ? inputContextsInitiatedByRemote : inputContextsInitiatedByLocal;
+      dataDirection == ByteTransferDataDirection.INITIATOR_SENDS_DATA
+        ? inputContextsInitiatedByRemote : inputContextsInitiatedByLocal;
     return contexts.get(transferIndex);
   }
 
   /**
    * Responds to new transfer contexts by a remote executor.
-   * @param ctx netty {@link ChannelHandlerContext}
+   *
+   * @param ctx     netty {@link ChannelHandlerContext}
    * @param message context setup message from the remote executor
    * @throws Exception exceptions from handler
    */
   @Override
   protected void channelRead0(final ChannelHandlerContext ctx, final ByteTransferContextSetupMessage message)
-      throws Exception {
+    throws Exception {
     setRemoteExecutorId(message.getInitiatorExecutorId());
     byteTransfer.onNewContextByRemoteExecutor(message.getInitiatorExecutorId(), channel);
     final ByteTransferDataDirection dataDirection = message.getDataDirection();
@@ -143,27 +148,29 @@
 
   /**
    * Removes the specified contexts from map.
+   *
    * @param context the {@link ByteTransferContext} to remove.
    */
   void onContextExpired(final ByteTransferContext context) {
     final ContextId contextId = context.getContextId();
     final ConcurrentMap<Integer, ? extends ByteTransferContext> contexts = context instanceof ByteInputContext
-        ? (contextId.getDataDirection() == ByteTransferDataDirection.INITIATOR_SENDS_DATA
-            ? inputContextsInitiatedByRemote : inputContextsInitiatedByLocal)
-        : (contextId.getDataDirection() == ByteTransferDataDirection.INITIATOR_SENDS_DATA
-            ? outputContextsInitiatedByLocal : outputContextsInitiatedByRemote);
+      ? (contextId.getDataDirection() == ByteTransferDataDirection.INITIATOR_SENDS_DATA
+      ? inputContextsInitiatedByRemote : inputContextsInitiatedByLocal)
+      : (contextId.getDataDirection() == ByteTransferDataDirection.INITIATOR_SENDS_DATA
+      ? outputContextsInitiatedByLocal : outputContextsInitiatedByRemote);
     contexts.remove(contextId.getTransferIndex(), context);
   }
 
   /**
    * Initiates a context and stores to the specified map.
-   * @param contexts map for storing context
+   *
+   * @param contexts             map for storing context
    * @param transferIndexCounter counter for generating transfer index
-   * @param dataDirection data direction to include in the context id
-   * @param contextGenerator a function that returns context from context id
-   * @param executorId id of the remote executor
-   * @param <T> {@link ByteInputContext} or {@link ByteOutputContext}
-   * @param isPipe is a pipe context
+   * @param dataDirection        data direction to include in the context id
+   * @param contextGenerator     a function that returns context from context id
+   * @param executorId           id of the remote executor
+   * @param <T>                  {@link ByteInputContext} or {@link ByteOutputContext}
+   * @param isPipe               is a pipe context
    * @return generated context
    */
   <T extends ByteTransferContext> T newContext(final ConcurrentMap<Integer, T> contexts,
@@ -187,34 +194,37 @@
 
   /**
    * Create a new {@link ByteInputContext}.
-   * @param executorId target executor id
+   *
+   * @param executorId        target executor id
    * @param contextDescriptor the context descriptor
    * @param isPipe            is pipe
    * @return new {@link ByteInputContext}
    */
   ByteInputContext newInputContext(final String executorId, final byte[] contextDescriptor, final boolean isPipe) {
     return newContext(inputContextsInitiatedByLocal, nextInputTransferIndex,
-        ByteTransferDataDirection.INITIATOR_RECEIVES_DATA,
-        contextId -> new ByteInputContext(executorId, contextId, contextDescriptor, this),
-        executorId, isPipe);
+      ByteTransferDataDirection.INITIATOR_RECEIVES_DATA,
+      contextId -> new ByteInputContext(executorId, contextId, contextDescriptor, this),
+      executorId, isPipe);
   }
 
   /**
    * Create a new {@link ByteOutputContext}.
-   * @param executorId target executor id
+   *
+   * @param executorId        target executor id
    * @param contextDescriptor the context descriptor
    * @param isPipe            is pipe
    * @return new {@link ByteOutputContext}
    */
   ByteOutputContext newOutputContext(final String executorId, final byte[] contextDescriptor, final boolean isPipe) {
     return newContext(outputContextsInitiatedByLocal, nextOutputTransferIndex,
-        ByteTransferDataDirection.INITIATOR_SENDS_DATA,
-        contextId -> new ByteOutputContext(executorId, contextId, contextDescriptor, this),
-        executorId, isPipe);
+      ByteTransferDataDirection.INITIATOR_SENDS_DATA,
+      contextId -> new ByteOutputContext(executorId, contextId, contextDescriptor, this),
+      executorId, isPipe);
   }
 
   /**
   * Sets this context manager as connected to the specified remote executor.
+   *
    * @param executorId the remote executor id
    */
   private void setRemoteExecutorId(final String executorId) {
@@ -242,9 +252,10 @@
 
   /**
    * Invoke {@link ByteTransferContext#onChannelError(Throwable)} on the specified contexts.
+   *
    * @param contexts map storing the contexts
-   * @param cause the error
-   * @param <T> {@link ByteInputContext} or {@link ByteOutputContext}
+   * @param cause    the error
+   * @param <T>      {@link ByteInputContext} or {@link ByteOutputContext}
    */
   private <T extends ByteTransferContext> void throwChannelErrorOnContexts(final ConcurrentMap<Integer, T> contexts,
                                                                            final Throwable cause) {
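
The `newContext` method documented above pairs an `AtomicInteger` transfer-index counter with a `ConcurrentMap` registry, so that inbound frames can later be routed back to the right context. A minimal sketch of that idiom with illustrative names:

```java
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.IntFunction;

/** Sketch of the newContext idiom: a counter issues transfer indices, a map routes them back. */
final class ContextRegistry<T> {
  private final ConcurrentMap<Integer, T> contexts = new ConcurrentHashMap<>();
  private final AtomicInteger nextTransferIndex = new AtomicInteger(0);

  /** Issues a fresh transfer index, builds the context, and registers it. */
  T newContext(final IntFunction<T> contextGenerator) {
    final int transferIndex = nextTransferIndex.getAndIncrement();
    final T context = contextGenerator.apply(transferIndex);
    contexts.put(transferIndex, context);
    return context;
  }

  /** Looks a context up by transfer index, as getInputContext does for inbound frames. */
  T lookup(final int transferIndex) {
    return contexts.get(transferIndex);
  }

  public static void main(final String[] args) {
    final ContextRegistry<String> registry = new ContextRegistry<>();
    registry.newContext(index -> "input-context-" + index);
    System.out.println(registry.lookup(0)); // prints input-context-0
  }
}
```
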
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ControlFrameEncoder.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ControlFrameEncoder.java
index 836177a..b4bace1 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ControlFrameEncoder.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ControlFrameEncoder.java
@@ -19,13 +19,13 @@
 package org.apache.nemo.runtime.executor.bytetransfer;
 
 import com.google.protobuf.ByteString;
-import org.apache.nemo.conf.JobConf;
-import org.apache.nemo.runtime.common.comm.ControlMessage.ByteTransferContextSetupMessage;
 import io.netty.buffer.ByteBuf;
 import io.netty.buffer.Unpooled;
 import io.netty.channel.ChannelHandler;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.codec.MessageToMessageEncoder;
+import org.apache.nemo.conf.JobConf;
+import org.apache.nemo.runtime.common.comm.ControlMessage.ByteTransferContextSetupMessage;
 import org.apache.reef.tang.annotations.Parameter;
 
 import javax.inject.Inject;
@@ -58,12 +58,12 @@
                         final ByteTransferContext in,
                         final List out) {
     final ByteTransferContextSetupMessage message = ByteTransferContextSetupMessage.newBuilder()
-        .setInitiatorExecutorId(localExecutorId)
-        .setTransferIndex(in.getContextId().getTransferIndex())
-        .setDataDirection(in.getContextId().getDataDirection())
-        .setContextDescriptor(ByteString.copyFrom(in.getContextDescriptor()))
-        .setIsPipe(in.getContextId().isPipe())
-        .build();
+      .setInitiatorExecutorId(localExecutorId)
+      .setTransferIndex(in.getContextId().getTransferIndex())
+      .setDataDirection(in.getContextId().getDataDirection())
+      .setContextDescriptor(ByteString.copyFrom(in.getContextDescriptor()))
+      .setIsPipe(in.getContextId().isPipe())
+      .build();
     final byte[] frameBody = message.toByteArray();
     out.add(ZEROS.retain());
     out.add(ctx.alloc().ioBuffer(BODY_LENGTH_LENGTH, BODY_LENGTH_LENGTH).writeInt(frameBody.length));
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/DataFrameEncoder.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/DataFrameEncoder.java
index 1909885..6b5cb7b 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/DataFrameEncoder.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/DataFrameEncoder.java
@@ -18,12 +18,12 @@
  */
 package org.apache.nemo.runtime.executor.bytetransfer;
 
-import org.apache.nemo.runtime.common.comm.ControlMessage;
 import io.netty.buffer.ByteBuf;
 import io.netty.channel.ChannelHandler;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.codec.MessageToMessageEncoder;
 import io.netty.util.Recycler;
+import org.apache.nemo.runtime.common.comm.ControlMessage;
 
 import javax.annotation.Nullable;
 import javax.inject.Inject;
@@ -114,9 +114,9 @@
     /**
      * Creates a {@link DataFrame} to supply content to sub-stream.
      *
-     * @param contextId   the context id
-     * @param body        the body or {@code null}
-     * @param length      the length of the body, in bytes
+     * @param contextId      the context id
+     * @param body           the body or {@code null}
+     * @param length         the length of the body, in bytes
      * @param opensSubStream whether this frame opens a new sub-stream or not
      * @return the {@link DataFrame} object
      */
@@ -135,7 +135,8 @@
 
     /**
      * Creates a {@link DataFrame} to close the whole context.
-     * @param contextId   the context id
+     *
+     * @param contextId the context id
      * @return the {@link DataFrame} object
      */
     static DataFrame newInstance(final ByteTransferContext.ContextId contextId) {
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/FrameDecoder.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/FrameDecoder.java
index 28bd025..3bb9d35 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/FrameDecoder.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/FrameDecoder.java
@@ -19,24 +19,24 @@
 package org.apache.nemo.runtime.executor.bytetransfer;
 
 import com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.nemo.runtime.common.comm.ControlMessage.ByteTransferContextSetupMessage;
-import org.apache.nemo.runtime.common.comm.ControlMessage.ByteTransferDataDirection;
 import io.netty.buffer.ByteBuf;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.handler.codec.ByteToMessageDecoder;
+import org.apache.nemo.runtime.common.comm.ControlMessage.ByteTransferContextSetupMessage;
+import org.apache.nemo.runtime.common.comm.ControlMessage.ByteTransferDataDirection;
 
 import java.util.List;
 
 /**
  * Interprets inbound byte streams to compose frames.
- *
+ * <p>
  * <p>
  * More specifically,
  * <ul>
- *   <li>Recognizes the type of the frame, namely control or data.</li>
- *   <li>If the received bytes are a part of a control frame, waits until the full content of the frame becomes
- *   available and decode the frame to emit a control frame object.</li>
- *   <li>If the received bytes consists a data frame, supply the data to the corresponding {@link ByteInputContext}.
+ * <li>Recognizes the type of the frame, namely control or data.</li>
+ * <li>If the received bytes are part of a control frame, waits until the full content of the frame becomes
+ * available and decodes the frame to emit a control frame object.</li>
+ * <li>If the received bytes constitute a data frame, supplies the data to the corresponding {@link ByteInputContext}.</li>
  * </ul>
  *
  * <h3>Control frame specification:</h3>
@@ -95,7 +95,7 @@
 
   @Override
   protected void decode(final ChannelHandlerContext ctx, final ByteBuf in, final List out)
-      throws InvalidProtocolBufferException {
+    throws InvalidProtocolBufferException {
     while (true) {
       final boolean toContinue;
       if (controlBodyBytesToRead > 0) {
@@ -141,14 +141,14 @@
       // setup context for reading data frame body
       dataBodyBytesToRead = length;
       final ByteTransferDataDirection dataDirection = (flags & ((byte) (1 << 2))) == 0
-          ? ByteTransferDataDirection.INITIATOR_SENDS_DATA : ByteTransferDataDirection.INITIATOR_RECEIVES_DATA;
+        ? ByteTransferDataDirection.INITIATOR_SENDS_DATA : ByteTransferDataDirection.INITIATOR_RECEIVES_DATA;
       final boolean newSubStreamFlag = (flags & ((byte) (1 << 1))) != 0;
       isLastFrame = (flags & ((byte) (1 << 0))) != 0;
       inputContext = contextManager.getInputContext(dataDirection, transferIndex);
       if (inputContext == null) {
         throw new IllegalStateException(String.format("Transport context for %s:%d was not found between the local"
-                + "address %s and the remote address %s", dataDirection, transferIndex,
-            ctx.channel().localAddress(), ctx.channel().remoteAddress()));
+            + "address %s and the remote address %s", dataDirection, transferIndex,
+          ctx.channel().localAddress(), ctx.channel().remoteAddress()));
       }
       if (newSubStreamFlag) {
         inputContext.onNewStream();
@@ -169,7 +169,7 @@
    * @throws InvalidProtocolBufferException when failed to parse
    */
   private boolean onControlBodyAdded(final ByteBuf in, final List out)
-      throws InvalidProtocolBufferException {
+    throws InvalidProtocolBufferException {
     assert (controlBodyBytesToRead > 0);
     assert (dataBodyBytesToRead == 0);
     assert (inputContext == null);
@@ -192,7 +192,7 @@
       offset = 0;
     }
     final ByteTransferContextSetupMessage controlMessage
-        = ByteTransferContextSetupMessage.PARSER.parseFrom(bytes, offset, (int) controlBodyBytesToRead);
+      = ByteTransferContextSetupMessage.PARSER.parseFrom(bytes, offset, (int) controlBodyBytesToRead);
 
     out.add(controlMessage);
     in.skipBytes((int) controlBodyBytesToRead);
@@ -203,7 +203,7 @@
   /**
    * Supply byte stream to an existing {@link ByteInputContext}.
    *
-   * @param in  the {@link ByteBuf} from which to read data
+   * @param in the {@link ByteBuf} from which to read data
    * @throws InterruptedException when interrupted while adding to {@link ByteBuf} queue
    */
   private void onDataBodyAdded(final ByteBuf in) {
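
The bit tests re-indented above (`flags & ((byte) (1 << 2))` and friends) pack three frame properties into a single header byte. A minimal sketch of the same decoding, with bit positions matching the checks in the hunk; the constant names are illustrative.

```java
/** Sketch of the data-frame flag byte: one property per bit. */
final class FrameFlags {
  static final byte DIRECTION_BIT = 1 << 2;     // 0: initiator sends data, 1: initiator receives data
  static final byte NEW_SUBSTREAM_BIT = 1 << 1; // frame opens a new sub-stream
  static final byte LAST_FRAME_BIT = 1 << 0;    // frame closes the whole context

  public static void main(final String[] args) {
    final byte flags = (byte) (NEW_SUBSTREAM_BIT | LAST_FRAME_BIT);
    System.out.println("initiator receives: " + ((flags & DIRECTION_BIT) != 0));     // false
    System.out.println("opens sub-stream:   " + ((flags & NEW_SUBSTREAM_BIT) != 0)); // true
    System.out.println("last frame:         " + ((flags & LAST_FRAME_BIT) != 0));    // true
  }
}
```
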
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/BlockManagerWorker.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/BlockManagerWorker.java
index ec8df24..6aed5f1 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/BlockManagerWorker.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/BlockManagerWorker.java
@@ -20,13 +20,14 @@
 
 import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.commons.lang3.SerializationUtils;
 import org.apache.nemo.common.KeyRange;
 import org.apache.nemo.common.exception.BlockFetchException;
 import org.apache.nemo.common.exception.BlockWriteException;
 import org.apache.nemo.common.exception.UnsupportedBlockStoreException;
 import org.apache.nemo.common.exception.UnsupportedExecutionPropertyException;
-import org.apache.nemo.common.ir.edge.executionproperty.DataStoreProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.DataPersistenceProperty;
+import org.apache.nemo.common.ir.edge.executionproperty.DataStoreProperty;
 import org.apache.nemo.conf.JobConf;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.comm.ControlMessage;
@@ -39,21 +40,24 @@
 import org.apache.nemo.runtime.executor.data.block.FileBlock;
 import org.apache.nemo.runtime.executor.data.partition.NonSerializedPartition;
 import org.apache.nemo.runtime.executor.data.partition.SerializedPartition;
-import org.apache.nemo.runtime.executor.data.stores.BlockStore;
 import org.apache.nemo.runtime.executor.data.stores.*;
-import org.apache.commons.lang3.SerializationUtils;
 import org.apache.reef.tang.annotations.Parameter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.annotation.concurrent.ThreadSafe;
 import javax.inject.Inject;
 import java.io.IOException;
-import java.util.*;
-import java.util.concurrent.*;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * Executor-side block manager.
  */
@@ -93,7 +97,7 @@
    * @param persistentConnectionToMasterMap the connection map.
    * @param byteTransfer                    the byte transfer.
    * @param serializerManager               the serializer manager.
-   * @param blockTransferThrottler    restricts parallel connections
+   * @param blockTransferThrottler          restricts parallel connections
    */
   @Inject
   private BlockManagerWorker(@Parameter(JobConf.ExecutorId.class) final String executorId,
@@ -125,7 +129,7 @@
   /**
    * Creates a new block.
    *
-   * @param blockId the ID of the block to create.
+   * @param blockId    the ID of the block to create.
    * @param blockStore the store to place the block.
    * @return the created block.
    * @throws BlockWriteException for any error occurred while trying to create a block.
@@ -148,31 +152,31 @@
    * @return the {@link CompletableFuture} of the block.
    */
   public CompletableFuture<DataUtil.IteratorWithNumBytes> readBlock(
-      final String blockIdWildcard,
-      final String runtimeEdgeId,
-      final DataStoreProperty.Value blockStore,
-      final KeyRange keyRange) {
+    final String blockIdWildcard,
+    final String runtimeEdgeId,
+    final DataStoreProperty.Value blockStore,
+    final KeyRange keyRange) {
     // Let's see if a remote worker has it
     final CompletableFuture<ControlMessage.Message> blockLocationFuture =
-        pendingBlockLocationRequest.computeIfAbsent(blockIdWildcard, blockIdToRequest -> {
-          // Ask Master for the location.
-          // (IMPORTANT): This 'request' effectively blocks the TaskExecutor thread if the block is IN_PROGRESS.
-          // We use this property to make the receiver task of a 'push' edge to wait in an Executor for its input data
-          // to become available.
-          final CompletableFuture<ControlMessage.Message> responseFromMasterFuture = persistentConnectionToMasterMap
-              .getMessageSender(MessageEnvironment.BLOCK_MANAGER_MASTER_MESSAGE_LISTENER_ID).request(
-                  ControlMessage.Message.newBuilder()
-                      .setId(RuntimeIdManager.generateMessageId())
-                      .setListenerId(MessageEnvironment.BLOCK_MANAGER_MASTER_MESSAGE_LISTENER_ID)
-                      .setType(ControlMessage.MessageType.RequestBlockLocation)
-                      .setRequestBlockLocationMsg(
-                          ControlMessage.RequestBlockLocationMsg.newBuilder()
-                              .setExecutorId(executorId)
-                              .setBlockIdWildcard(blockIdWildcard)
-                              .build())
-                      .build());
-          return responseFromMasterFuture;
-        });
+      pendingBlockLocationRequest.computeIfAbsent(blockIdWildcard, blockIdToRequest -> {
+        // Ask Master for the location.
+        // (IMPORTANT): This 'request' effectively blocks the TaskExecutor thread if the block is IN_PROGRESS.
+        // We use this property to make the receiver task of a 'push' edge to wait in an Executor for its input data
+        // to become available.
+        final CompletableFuture<ControlMessage.Message> responseFromMasterFuture = persistentConnectionToMasterMap
+          .getMessageSender(MessageEnvironment.BLOCK_MANAGER_MASTER_MESSAGE_LISTENER_ID).request(
+            ControlMessage.Message.newBuilder()
+              .setId(RuntimeIdManager.generateMessageId())
+              .setListenerId(MessageEnvironment.BLOCK_MANAGER_MASTER_MESSAGE_LISTENER_ID)
+              .setType(ControlMessage.MessageType.RequestBlockLocation)
+              .setRequestBlockLocationMsg(
+                ControlMessage.RequestBlockLocationMsg.newBuilder()
+                  .setExecutorId(executorId)
+                  .setBlockIdWildcard(blockIdWildcard)
+                  .build())
+              .build());
+        return responseFromMasterFuture;
+      });
     blockLocationFuture.whenComplete((message, throwable) -> {
       pendingBlockLocationRequest.remove(blockIdWildcard);
     });
@@ -184,11 +188,11 @@
       }
 
       final ControlMessage.BlockLocationInfoMsg blockLocationInfoMsg =
-          responseFromMaster.getBlockLocationInfoMsg();
+        responseFromMaster.getBlockLocationInfoMsg();
       if (!blockLocationInfoMsg.hasOwnerExecutorId()) {
         throw new BlockFetchException(new Throwable(
-            "Block " + blockIdWildcard + " location unknown: "
-                + "The block state is " + blockLocationInfoMsg.getState()));
+          "Block " + blockIdWildcard + " location unknown: "
+            + "The block state is " + blockLocationInfoMsg.getState()));
       }
 
       // This is the executor id that we wanted to know
@@ -206,8 +210,8 @@
             .setKeyRange(ByteString.copyFrom(SerializationUtils.serialize(keyRange)))
             .build();
         final CompletableFuture<ByteInputContext> contextFuture = blockTransferThrottler
-            .requestTransferPermission(runtimeEdgeId)
-            .thenCompose(obj -> byteTransfer.newInputContext(targetExecutorId, descriptor.toByteArray(), false));
+          .requestTransferPermission(runtimeEdgeId)
+          .thenCompose(obj -> byteTransfer.newInputContext(targetExecutorId, descriptor.toByteArray(), false));
 
         // whenComplete() ensures that blockTransferThrottler.onTransferFinished() is always called,
         // even on failures. Actual failure handling and Task retry will be done by DataFetcher.
@@ -224,8 +228,8 @@
         });
 
         return contextFuture
-            .thenApply(context -> new DataUtil.InputStreamIterator(context.getInputStreams(),
-                serializerManager.getSerializer(runtimeEdgeId)));
+          .thenApply(context -> new DataUtil.InputStreamIterator(context.getInputStreams(),
+            serializerManager.getSerializer(runtimeEdgeId)));
       }
     });
   }
@@ -233,10 +237,10 @@
   /**
    * Writes a block to a store.
    *
-   * @param block                the block to write.
-   * @param blockStore           the store to save the block.
-   * @param expectedReadTotal    the expected number of read for this block.
-   * @param persistence          how to handle the used block.
+   * @param block             the block to write.
+   * @param blockStore        the store to save the block.
+   * @param expectedReadTotal the expected number of read for this block.
+   * @param persistence       how to handle the used block.
    */
   public void writeBlock(final Block block,
                          final DataStoreProperty.Value blockStore,
@@ -259,10 +263,10 @@
     final BlockStore store = getBlockStore(blockStore);
     store.writeBlock(block);
     final ControlMessage.BlockStateChangedMsg.Builder blockStateChangedMsgBuilder =
-        ControlMessage.BlockStateChangedMsg.newBuilder()
-            .setExecutorId(executorId)
-            .setBlockId(blockId)
-            .setState(ControlMessage.BlockStateFromExecutor.AVAILABLE);
+      ControlMessage.BlockStateChangedMsg.newBuilder()
+        .setExecutorId(executorId)
+        .setBlockId(blockId)
+        .setState(ControlMessage.BlockStateFromExecutor.AVAILABLE);
 
     if (DataStoreProperty.Value.GlusterFileStore.equals(blockStore)) {
       blockStateChangedMsgBuilder.setLocation(REMOTE_FILE_STORE);
@@ -271,12 +275,12 @@
     }
 
     persistentConnectionToMasterMap.getMessageSender(MessageEnvironment.BLOCK_MANAGER_MASTER_MESSAGE_LISTENER_ID)
-        .send(ControlMessage.Message.newBuilder()
-            .setId(RuntimeIdManager.generateMessageId())
-            .setListenerId(MessageEnvironment.BLOCK_MANAGER_MASTER_MESSAGE_LISTENER_ID)
-            .setType(ControlMessage.MessageType.BlockStateChanged)
-            .setBlockStateChangedMsg(blockStateChangedMsgBuilder.build())
-            .build());
+      .send(ControlMessage.Message.newBuilder()
+        .setId(RuntimeIdManager.generateMessageId())
+        .setListenerId(MessageEnvironment.BLOCK_MANAGER_MASTER_MESSAGE_LISTENER_ID)
+        .setType(ControlMessage.MessageType.BlockStateChanged)
+        .setBlockStateChangedMsg(blockStateChangedMsgBuilder.build())
+        .build());
   }
 
   /**
@@ -293,10 +297,10 @@
 
     if (deleted) {
       final ControlMessage.BlockStateChangedMsg.Builder blockStateChangedMsgBuilder =
-          ControlMessage.BlockStateChangedMsg.newBuilder()
-              .setExecutorId(executorId)
-              .setBlockId(blockId)
-              .setState(ControlMessage.BlockStateFromExecutor.NOT_AVAILABLE);
+        ControlMessage.BlockStateChangedMsg.newBuilder()
+          .setExecutorId(executorId)
+          .setBlockId(blockId)
+          .setState(ControlMessage.BlockStateFromExecutor.NOT_AVAILABLE);
 
       if (DataStoreProperty.Value.GlusterFileStore.equals(blockStore)) {
         blockStateChangedMsgBuilder.setLocation(REMOTE_FILE_STORE);
@@ -305,12 +309,12 @@
       }
 
       persistentConnectionToMasterMap.getMessageSender(MessageEnvironment.BLOCK_MANAGER_MASTER_MESSAGE_LISTENER_ID)
-          .send(ControlMessage.Message.newBuilder()
-              .setId(RuntimeIdManager.generateMessageId())
-              .setListenerId(MessageEnvironment.BLOCK_MANAGER_MASTER_MESSAGE_LISTENER_ID)
-              .setType(ControlMessage.MessageType.BlockStateChanged)
-              .setBlockStateChangedMsg(blockStateChangedMsgBuilder)
-              .build());
+        .send(ControlMessage.Message.newBuilder()
+          .setId(RuntimeIdManager.generateMessageId())
+          .setListenerId(MessageEnvironment.BLOCK_MANAGER_MASTER_MESSAGE_LISTENER_ID)
+          .setType(ControlMessage.MessageType.BlockStateChanged)
+          .setBlockStateChangedMsg(blockStateChangedMsgBuilder)
+          .build());
     } else {
       throw new BlockFetchException(new Throwable("Cannot find corresponding block " + blockId));
     }
@@ -341,7 +345,7 @@
           final Optional<Block> optionalBlock = getBlockStore(blockStore).readBlock(blockId);
           if (optionalBlock.isPresent()) {
             if (DataStoreProperty.Value.LocalFileStore.equals(blockStore)
-                || DataStoreProperty.Value.GlusterFileStore.equals(blockStore)) {
+              || DataStoreProperty.Value.GlusterFileStore.equals(blockStore)) {
               final List<FileArea> fileAreas = ((FileBlock) optionalBlock.get()).asFileAreas(keyRange);
               for (final FileArea fileArea : fileAreas) {
                 try (ByteOutputContext.ByteOutputStream os = outputContext.newOutputStream()) {
@@ -394,9 +398,9 @@
    * @return the result data in the block.
    */
   private CompletableFuture<DataUtil.IteratorWithNumBytes> getDataFromLocalBlock(
-      final String blockId,
-      final DataStoreProperty.Value blockStore,
-      final KeyRange keyRange) {
+    final String blockId,
+    final DataStoreProperty.Value blockStore,
+    final KeyRange keyRange) {
     final BlockStore store = getBlockStore(blockStore);
 
     // First, try to fetch the block from local BlockStore.
@@ -418,7 +422,7 @@
           }
 
           return CompletableFuture.completedFuture(DataUtil.IteratorWithNumBytes.of(innerIterator, numSerializedBytes,
-              numEncodedBytes));
+            numEncodedBytes));
         } catch (final DataUtil.IteratorWithNumBytes.NumBytesNotSupportedException e) {
           return CompletableFuture.completedFuture(DataUtil.IteratorWithNumBytes.of(innerIterator));
         }
@@ -459,6 +463,7 @@
 
   /**
    * Gets the {@link BlockStore} from annotated value of {@link DataStoreProperty}.
+   *
    * @param blockStore the annotated value of {@link DataStoreProperty}.
    * @return the block store.
    */
@@ -480,11 +485,12 @@
 
   /**
    * Decodes BlockStore property from protocol buffer.
+   *
    * @param blockStore property from protocol buffer
    * @return the corresponding {@link DataStoreProperty} value
    */
   private static ControlMessage.BlockStore convertBlockStore(
-      final DataStoreProperty.Value blockStore) {
+    final DataStoreProperty.Value blockStore) {
     switch (blockStore) {
       case MemoryStore:
         return ControlMessage.BlockStore.MEMORY;
@@ -502,11 +508,12 @@
 
   /**
    * Encodes {@link DataStoreProperty} value into protocol buffer property.
+   *
    * @param blockStoreType {@link DataStoreProperty} value
    * @return the corresponding {@link ControlMessage.BlockStore} value
    */
   private static DataStoreProperty.Value convertBlockStore(
-      final ControlMessage.BlockStore blockStoreType) {
+    final ControlMessage.BlockStore blockStoreType) {
     switch (blockStoreType) {
       case MEMORY:
         return DataStoreProperty.Value.MemoryStore;
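> Reviewer note: the reindented `requestBlockLocation` path above follows a pattern that recurs throughout this file: build a protobuf request, send it to the master over the persistent connection, and expose the eventual reply as a `CompletableFuture`, with a pending-request map so concurrent callers for the same block share one round trip. A minimal sketch of that pattern, using a hypothetical `MessageSender` interface and plain strings in place of `ControlMessage`:

```java
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;

/** Sketch of a future-based request/response cache keyed by block id. */
final class LocationClient {
  interface MessageSender {           // hypothetical stand-in for the master connection
    void send(long messageId, String payload);
  }

  private final MessageSender toMaster;
  // Pending requests, so concurrent callers for the same block share one round trip.
  private final Map<String, CompletableFuture<String>> pending = new ConcurrentHashMap<>();
  private long nextMessageId = 0;

  LocationClient(final MessageSender toMaster) {
    this.toMaster = toMaster;
  }

  synchronized CompletableFuture<String> requestLocation(final String blockId) {
    return pending.computeIfAbsent(blockId, id -> {
      final CompletableFuture<String> future = new CompletableFuture<>();
      toMaster.send(nextMessageId++, "RequestBlockLocation:" + id);
      // Drop the cache entry once resolved, mirroring whenComplete() in the diff above.
      future.whenComplete((location, error) -> pending.remove(id));
      return future;
    });
  }

  /** Called by the network thread when the master's reply arrives. */
  void onResponse(final String blockId, final String ownerExecutorId) {
    final CompletableFuture<String> future = pending.get(blockId);
    if (future != null) {
      future.complete(ownerExecutorId);
    }
  }
}
```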
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/BlockTransferThrottler.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/BlockTransferThrottler.java
index 642eaf2..c4e7450 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/BlockTransferThrottler.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/BlockTransferThrottler.java
@@ -48,6 +48,7 @@
 
   /**
    * Request a permission to make a connection.
+   *
    * @param runtimeEdgeId the corresponding runtime edge id.
    * @return a future that will be completed when the connection is granted.
    */
@@ -70,6 +71,7 @@
 
   /**
    * Indicates the transfer has finished.
+   *
    * @param runtimeEdgeId the corresponding runtime edge id.
    */
   public synchronized void onTransferFinished(final String runtimeEdgeId) {
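> Reviewer note: the two javadoc fixes above belong to `BlockTransferThrottler`, which grants transfer permissions as futures and is released through `onTransferFinished()`. A minimal sketch of such a throttler, assuming a single fixed concurrency limit (the real class tracks permissions per runtime edge id):

```java
import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.CompletableFuture;

/** Sketch: cap concurrent transfers, queueing excess requests in FIFO order. */
final class TransferThrottler {
  private final int maxConcurrent;
  private final Queue<CompletableFuture<Void>> waiting = new ArrayDeque<>();
  private int running = 0;

  TransferThrottler(final int maxConcurrent) {
    this.maxConcurrent = maxConcurrent;
  }

  synchronized CompletableFuture<Void> requestTransferPermission() {
    final CompletableFuture<Void> permission = new CompletableFuture<>();
    if (running < maxConcurrent) {
      running++;
      permission.complete(null);   // grant immediately
    } else {
      waiting.add(permission);     // grant later, when a running transfer finishes
    }
    return permission;
  }

  synchronized void onTransferFinished() {
    final CompletableFuture<Void> next = waiting.poll();
    if (next != null) {
      next.complete(null);         // hand the freed slot to the next waiter
    } else {
      running--;
    }
  }
}
```

As the comment in the `BlockManagerWorker` hunk notes, the release call belongs in `whenComplete()` so it also fires on failure, e.g. `throttler.requestTransferPermission().thenCompose(...).whenComplete((r, t) -> throttler.onTransferFinished())`.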
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/BroadcastManagerWorker.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/BroadcastManagerWorker.java
index 42806b7..11d095b 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/BroadcastManagerWorker.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/BroadcastManagerWorker.java
@@ -22,13 +22,13 @@
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import com.google.protobuf.ByteString;
+import net.jcip.annotations.ThreadSafe;
+import org.apache.commons.lang.SerializationUtils;
 import org.apache.nemo.conf.JobConf;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.runtime.common.message.MessageEnvironment;
 import org.apache.nemo.runtime.common.message.PersistentConnectionToMasterMap;
-import net.jcip.annotations.ThreadSafe;
-import org.apache.commons.lang.SerializationUtils;
 import org.apache.reef.tang.annotations.Parameter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -55,7 +55,7 @@
    * remote executors or the master.
    *
    * @param executorId of the executor.
-   * @param toMaster connection.
+   * @param toMaster   connection.
    */
   @Inject
   private BroadcastManagerWorker(@Parameter(JobConf.ExecutorId.class) final String executorId,
@@ -88,10 +88,11 @@
 
   /**
    * Get the variable with the id.
+   *
    * @param id of the variable.
    * @return the variable.
    */
-  public Object get(final Serializable id)  {
+  public Object get(final Serializable id) {
     LOG.info("get {}", id);
     try {
       return idToVariableCache.get(id);
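> Reviewer note: `BroadcastManagerWorker.get(id)` above delegates to a Guava `LoadingCache`, so each broadcast variable is fetched at most once per executor and concurrent readers block on the same load. A sketch under that assumption, with a hypothetical `MasterClient` standing in for the control-message round trip:

```java
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

import java.util.concurrent.ExecutionException;

/** Sketch: cache broadcast variables locally, loading from the master on a miss. */
final class BroadcastCache {
  interface MasterClient {            // hypothetical stand-in for the master connection
    Object fetchVariable(String id);
  }

  private final LoadingCache<String, Object> idToVariable;

  BroadcastCache(final MasterClient master) {
    this.idToVariable = CacheBuilder.newBuilder()
      .maximumSize(100)               // arbitrary bound, just for the sketch
      .build(new CacheLoader<String, Object>() {
        @Override
        public Object load(final String id) {
          // Invoked at most once per missing key; concurrent get() calls wait on it.
          return master.fetchVariable(id);
        }
      });
  }

  Object get(final String id) {
    try {
      return idToVariable.get(id);
    } catch (final ExecutionException e) {
      throw new RuntimeException(e);
    }
  }
}
```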
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/DataUtil.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/DataUtil.java
index 29b61c8..f5440ee 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/DataUtil.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/DataUtil.java
@@ -81,7 +81,7 @@
                                                                                      final Serializer serializer,
                                                                                      final K key,
                                                                                      final InputStream inputStream)
-      throws IOException {
+    throws IOException {
     final List deserializedData = new ArrayList();
     // We need to limit read bytes on this inputStream, which could be over-read by wrapped
     // compression stream. This depends on the nature of the compression algorithm used.
@@ -89,10 +89,10 @@
     // reading input from chained compression InputStream.
     try (final LimitedInputStream limitedInputStream = new LimitedInputStream(inputStream, partitionSize)) {
       final InputStreamIterator iterator =
-          new InputStreamIterator(Collections.singletonList(limitedInputStream).iterator(), serializer);
+        new InputStreamIterator(Collections.singletonList(limitedInputStream).iterator(), serializer);
       iterator.forEachRemaining(deserializedData::add);
       return new NonSerializedPartition(key, deserializedData, iterator.getNumSerializedBytes(),
-          iterator.getNumEncodedBytes());
+        iterator.getNumEncodedBytes());
     }
   }
 
@@ -107,13 +107,13 @@
    * @throws IOException if fail to convert.
    */
   public static <K extends Serializable> Iterable<SerializedPartition<K>> convertToSerPartitions(
-      final Serializer serializer,
-      final Iterable<NonSerializedPartition<K>> partitionsToConvert) throws IOException {
+    final Serializer serializer,
+    final Iterable<NonSerializedPartition<K>> partitionsToConvert) throws IOException {
     final List<SerializedPartition<K>> serializedPartitions = new ArrayList<>();
     for (final NonSerializedPartition<K> partitionToConvert : partitionsToConvert) {
       try (
-          final DirectByteArrayOutputStream bytesOutputStream = new DirectByteArrayOutputStream();
-          final OutputStream wrappedStream = buildOutputStream(bytesOutputStream, serializer.getEncodeStreamChainers());
+        final DirectByteArrayOutputStream bytesOutputStream = new DirectByteArrayOutputStream();
+        final OutputStream wrappedStream = buildOutputStream(bytesOutputStream, serializer.getEncodeStreamChainers());
       ) {
         serializePartition(serializer.getEncoderFactory(), partitionToConvert, wrappedStream);
         // We need to close wrappedStream on here, because DirectByteArrayOutputStream:getBufDirectly() returns
@@ -124,7 +124,7 @@
         final byte[] serializedBytes = bytesOutputStream.getBufDirectly();
         final int actualLength = bytesOutputStream.size();
         serializedPartitions.add(
-            new SerializedPartition<>(partitionToConvert.getKey(), serializedBytes, actualLength));
+          new SerializedPartition<>(partitionToConvert.getKey(), serializedBytes, actualLength));
       }
     }
     return serializedPartitions;
@@ -141,17 +141,17 @@
    * @throws IOException if fail to convert.
    */
   public static <K extends Serializable> Iterable<NonSerializedPartition<K>> convertToNonSerPartitions(
-      final Serializer serializer,
-      final Iterable<SerializedPartition<K>> partitionsToConvert) throws IOException {
+    final Serializer serializer,
+    final Iterable<SerializedPartition<K>> partitionsToConvert) throws IOException {
     final List<NonSerializedPartition<K>> nonSerializedPartitions = new ArrayList<>();
     for (final SerializedPartition<K> partitionToConvert : partitionsToConvert) {
       final K key = partitionToConvert.getKey();
 
 
       try (final ByteArrayInputStream byteArrayInputStream =
-               new ByteArrayInputStream(partitionToConvert.getData())) {
+             new ByteArrayInputStream(partitionToConvert.getData())) {
         final NonSerializedPartition<K> deserializePartition = deserializePartition(
-            partitionToConvert.getLength(), serializer, key, byteArrayInputStream);
+          partitionToConvert.getLength(), serializer, key, byteArrayInputStream);
         nonSerializedPartitions.add(deserializePartition);
       }
     }
@@ -191,7 +191,7 @@
    * @throws IOException if fail to concatenate.
    */
   public static Iterable concatNonSerPartitions(final Iterable<NonSerializedPartition> partitionsToConcat)
-      throws IOException {
+    throws IOException {
     final List concatStreamBase = new ArrayList<>();
     Stream<Object> concatStream = concatStreamBase.stream();
     for (final NonSerializedPartition nonSerializedPartition : partitionsToConcat) {
@@ -246,7 +246,7 @@
             if (inputStreams.hasNext()) {
               serializedCountingStream = new CountingInputStream(inputStreams.next());
               encodedCountingStream = new CountingInputStream(buildInputStream(
-                  serializedCountingStream, serializer.getDecodeStreamChainers()));
+                serializedCountingStream, serializer.getDecodeStreamChainers()));
               decoder = serializer.getDecoderFactory().create(encodedCountingStream);
             } else {
               cannotContinueDecoding = true;
@@ -311,7 +311,7 @@
    */
   public static InputStream buildInputStream(final InputStream in,
                                              final List<DecodeStreamChainer> decodeStreamChainers)
-      throws IOException {
+    throws IOException {
     InputStream chained = in;
     for (final DecodeStreamChainer encodeStreamChainer : decodeStreamChainers) {
       chained = encodeStreamChainer.chainInput(chained);
@@ -329,7 +329,7 @@
    */
   public static OutputStream buildOutputStream(final OutputStream out,
                                                final List<EncodeStreamChainer> encodeStreamChainers)
-      throws IOException {
+    throws IOException {
     OutputStream chained = out;
     final List<EncodeStreamChainer> temporaryEncodeStreamChainerList = new ArrayList<>(encodeStreamChainers);
     Collections.reverse(temporaryEncodeStreamChainerList);
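> Reviewer note: the `buildInputStream`/`buildOutputStream` hunks above reflow Nemo's stream-chainer utilities: each chainer wraps a stream with one transformation, and the encode-side list is reversed before wrapping so that the first declared chainer becomes the outermost wrapper and bytes flow through chainers in declaration order. A self-contained sketch using `java.util.zip` GZIP in place of the project's LZ4 chainers:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

/** Sketch of the stream-chainer idea: each chainer adds one wrapping transformation. */
final class StreamChains {
  interface EncodeChainer {
    OutputStream chainOutput(OutputStream out) throws IOException;
  }

  interface DecodeChainer {
    InputStream chainInput(InputStream in) throws IOException;
  }

  static OutputStream buildOutputStream(final OutputStream out,
                                        final List<EncodeChainer> chainers) throws IOException {
    // Reverse so the first declared chainer ends up outermost.
    final List<EncodeChainer> reversed = new ArrayList<>(chainers);
    Collections.reverse(reversed);
    OutputStream chained = out;
    for (final EncodeChainer chainer : reversed) {
      chained = chainer.chainOutput(chained);
    }
    return chained;
  }

  static InputStream buildInputStream(final InputStream in,
                                      final List<DecodeChainer> chainers) throws IOException {
    InputStream chained = in;
    for (final DecodeChainer chainer : chainers) {
      chained = chainer.chainInput(chained);
    }
    return chained;
  }

  public static void main(final String[] args) throws IOException {
    final ByteArrayOutputStream sink = new ByteArrayOutputStream();
    final OutputStream out = buildOutputStream(sink, Collections.singletonList(GZIPOutputStream::new));
    out.write("hello".getBytes("UTF-8"));
    out.close();  // closing flushes the compression footer

    final InputStream back = buildInputStream(new ByteArrayInputStream(sink.toByteArray()),
      Collections.singletonList(GZIPInputStream::new));
    final byte[] buf = new byte[16];
    final int n = back.read(buf);
    System.out.println(new String(buf, 0, n, "UTF-8"));  // prints "hello"
  }
}
```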
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/FileArea.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/FileArea.java
index 7064a22..c6307f6 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/FileArea.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/FileArea.java
@@ -31,9 +31,9 @@
   /**
    * Creates a file area.
    *
-   * @param path      the path to the file
-   * @param position  the starting position of the area
-   * @param count     the length of the area
+   * @param path     the path to the file
+   * @param position the starting position of the area
+   * @param count    the length of the area
    */
   public FileArea(final String path, final long position, final long count) {
     this.path = path;
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/PipeContainer.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/PipeContainer.java
index a49ef78..1113f1e 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/PipeContainer.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/PipeContainer.java
@@ -36,10 +36,10 @@
 /**
  * Writes happen in a serialized manner with {@link PipeContainer#putPipeListIfAbsent(Pair, int)}.
  * This ensures that each key is initialized exactly once, and never updated.
- *
+ * <p>
  * Writes and reads for the same key never occur concurrently with no problem, because
  * (1) write never updates, and (2) read happens only after the write.
- *
+ * <p>
  * Reads can happen concurrently with no problem.
  */
 @ThreadSafe
@@ -53,6 +53,7 @@
 
   /**
    * Blocks the get operation when the number of elements is smaller than expected.
+   *
    * @param <T> type of the value.
    */
   class CountBasedBlockingContainer<T> {
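> Reviewer note: the `PipeContainer` javadoc above reasons that writes initialize each key exactly once and that reads block until the expected number of elements has arrived, which is what its `CountBasedBlockingContainer` provides. A minimal monitor-based sketch of that idea:

```java
import java.util.ArrayList;
import java.util.List;

/** Sketch of a count-based blocking container: readers wait until enough values arrive. */
final class CountBasedContainer<T> {
  private final List<T> values = new ArrayList<>();
  private final int expected;

  CountBasedContainer(final int expected) {
    this.expected = expected;
  }

  synchronized void put(final T value) {
    values.add(value);   // writers only ever add, never update (cf. the javadoc above)
    notifyAll();         // wake any reader waiting on the count
  }

  synchronized List<T> getWhenComplete() throws InterruptedException {
    while (values.size() < expected) {
      wait();            // the lock is released while waiting and re-acquired on wake-up
    }
    return new ArrayList<>(values);
  }
}
```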
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/PipeManagerWorker.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/PipeManagerWorker.java
index a433f3a..97c7f1f 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/PipeManagerWorker.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/PipeManagerWorker.java
@@ -39,14 +39,14 @@
 
 import javax.annotation.concurrent.ThreadSafe;
 import javax.inject.Inject;
-import java.util.*;
+import java.util.List;
 import java.util.concurrent.CompletableFuture;
 
 /**
  * Two threads use this class
  * - Network thread: Saves pipe connections created from destination tasks.
  * - Task executor thread: Creates new pipe connections to destination tasks (read),
- *                         or retrieves a saved pipe connection (write)
+ * or retrieves a saved pipe connection (write)
  */
 @ThreadSafe
 public final class PipeManagerWorker {
@@ -141,7 +141,7 @@
   /**
    * (SYNCHRONIZATION) Called by task threads.
    *
-   * @param runtimeEdge runtime edge
+   * @param runtimeEdge  runtime edge
    * @param srcTaskIndex source task index
    * @return output contexts.
    */
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/SerializerManager.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/SerializerManager.java
index dc46ccb..64b51d9 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/SerializerManager.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/SerializerManager.java
@@ -20,8 +20,8 @@
 
 import org.apache.nemo.common.coder.DecoderFactory;
 import org.apache.nemo.common.coder.EncoderFactory;
-import org.apache.nemo.runtime.executor.data.streamchainer.*;
 import org.apache.nemo.common.ir.edge.executionproperty.CompressionProperty;
+import org.apache.nemo.runtime.executor.data.streamchainer.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -82,17 +82,17 @@
     // Compression chain
     if (compressionProperty != null) {
       LOG.debug("Adding {} compression chain for {}",
-          compressionProperty, runtimeEdgeId);
+        compressionProperty, runtimeEdgeId);
       encodeStreamChainers.add(new CompressionStreamChainer(compressionProperty));
     }
     if (decompressionProperty != null) {
       LOG.debug("Adding {} decompression chain for {}",
-          decompressionProperty, runtimeEdgeId);
+        decompressionProperty, runtimeEdgeId);
       decodeStreamChainers.add(new DecompressionStreamChainer(decompressionProperty));
     }
 
     final Serializer serializer =
-        new Serializer(encoderFactory, decoderFactory, encodeStreamChainers, decodeStreamChainers);
+      new Serializer(encoderFactory, decoderFactory, encodeStreamChainers, decodeStreamChainers);
     runtimeEdgeIdToSerializer.putIfAbsent(runtimeEdgeId, serializer);
   }
 
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/FileBlock.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/FileBlock.java
index 2f42012..5f39c52 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/FileBlock.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/FileBlock.java
@@ -22,13 +22,14 @@
 import org.apache.nemo.common.Pair;
 import org.apache.nemo.common.exception.BlockFetchException;
 import org.apache.nemo.common.exception.BlockWriteException;
-import org.apache.nemo.runtime.executor.data.*;
+import org.apache.nemo.runtime.executor.data.DataUtil;
+import org.apache.nemo.runtime.executor.data.FileArea;
+import org.apache.nemo.runtime.executor.data.metadata.FileMetadata;
+import org.apache.nemo.runtime.executor.data.metadata.PartitionMetadata;
 import org.apache.nemo.runtime.executor.data.partition.NonSerializedPartition;
 import org.apache.nemo.runtime.executor.data.partition.Partition;
 import org.apache.nemo.runtime.executor.data.partition.SerializedPartition;
 import org.apache.nemo.runtime.executor.data.streamchainer.Serializer;
-import org.apache.nemo.runtime.executor.data.metadata.PartitionMetadata;
-import org.apache.nemo.runtime.executor.data.metadata.FileMetadata;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -81,7 +82,7 @@
    * @throws IOException if fail to write.
    */
   private void writeToFile(final Iterable<SerializedPartition<K>> serializedPartitions)
-      throws IOException {
+    throws IOException {
     try (final FileOutputStream fileOutputStream = new FileOutputStream(filePath, true)) {
       for (final SerializedPartition<K> serializedPartition : serializedPartitions) {
         // Reserve a partition write and get the metadata.
@@ -128,13 +129,13 @@
    */
   @Override
   public void writePartitions(final Iterable<NonSerializedPartition<K>> partitions)
-      throws BlockWriteException {
+    throws BlockWriteException {
     if (metadata.isCommitted()) {
       throw new BlockWriteException(new Throwable("The partition is already committed!"));
     } else {
       try {
         final Iterable<SerializedPartition<K>> convertedPartitions =
-            DataUtil.convertToSerPartitions(serializer, partitions);
+          DataUtil.convertToSerPartitions(serializer, partitions);
         writeSerializedPartitions(convertedPartitions);
       } catch (final IOException e) {
         throw new BlockWriteException(e);
@@ -151,7 +152,7 @@
    */
   @Override
   public void writeSerializedPartitions(final Iterable<SerializedPartition<K>> partitions)
-      throws BlockWriteException {
+    throws BlockWriteException {
     if (metadata.isCommitted()) {
       throw new BlockWriteException(new Throwable("The partition is already committed!"));
     } else {
@@ -195,9 +196,9 @@
         }
         for (final Pair<K, byte[]> partitionKeyBytes : partitionKeyBytesPairs) {
           final NonSerializedPartition<K> deserializePartition =
-              DataUtil.deserializePartition(
-                  partitionKeyBytes.right().length, serializer, partitionKeyBytes.left(),
-                  new ByteArrayInputStream(partitionKeyBytes.right()));
+            DataUtil.deserializePartition(
+              partitionKeyBytes.right().length, serializer, partitionKeyBytes.left(),
+              new ByteArrayInputStream(partitionKeyBytes.right()));
           deserializedPartitions.add(deserializePartition);
         }
       } catch (final IOException e) {
@@ -235,7 +236,7 @@
                 throw new IOException("The read data size does not match with the partition size.");
               }
               partitionsInRange.add(new SerializedPartition<>(
-                  key, serializedData, serializedData.length));
+                key, serializedData, serializedData.length));
             } else {
               // Have to skip this partition.
               skipBytes(fileStream, partitionmetadata.getPartitionSize());
@@ -323,7 +324,7 @@
         final long partitionSize = partitionMetadata.getPartitionSize();
         if (partitionSizes.containsKey(key)) {
           partitionSizes.compute(key,
-              (existingKey, existingValue) -> existingValue + partitionSize);
+            (existingKey, existingValue) -> existingValue + partitionSize);
         } else {
           partitionSizes.put(key, partitionSize);
         }
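> Reviewer note: the `FileBlock` hunks above show the on-disk layout this class assumes: partitions sit back-to-back in a single file, and a reader either consumes a partition's bytes or `skip()`s past it using the size recorded in its metadata. A sketch of that read-or-skip loop, with plain arrays standing in for the partition metadata list:

```java
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.function.IntPredicate;

/** Sketch: sequentially read partitions whose key falls in range, skip the rest. */
final class RangeReader {
  static List<byte[]> readInRange(final InputStream fileStream,
                                  final int[] partitionKeys,
                                  final int[] partitionSizes,
                                  final IntPredicate keyRange) throws IOException {
    final List<byte[]> result = new ArrayList<>();
    for (int i = 0; i < partitionKeys.length; i++) {
      if (keyRange.test(partitionKeys[i])) {
        final byte[] data = new byte[partitionSizes[i]];
        int read = 0;
        while (read < data.length) {   // a single read() may return fewer bytes
          final int n = fileStream.read(data, read, data.length - read);
          if (n < 0) {
            throw new IOException("The read data size does not match with the partition size.");
          }
          read += n;
        }
        result.add(data);
      } else {
        long toSkip = partitionSizes[i];
        while (toSkip > 0) {           // skip() may also skip fewer bytes than asked
          final long skipped = fileStream.skip(toSkip);
          if (skipped <= 0) {
            throw new IOException("Unexpected end of file while skipping a partition.");
          }
          toSkip -= skipped;
        }
      }
    }
    return result;
  }
}
```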
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/NonSerializedMemoryBlock.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/NonSerializedMemoryBlock.java
index 6ba51f3..eee0759 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/NonSerializedMemoryBlock.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/NonSerializedMemoryBlock.java
@@ -78,7 +78,7 @@
     } else {
       try {
         final NonSerializedPartition<K> partition =
-            nonCommittedPartitionsMap.computeIfAbsent(key, absentKey -> new NonSerializedPartition<>(key));
+          nonCommittedPartitionsMap.computeIfAbsent(key, absentKey -> new NonSerializedPartition<>(key));
         partition.write(element);
       } catch (final IOException e) {
         throw new BlockWriteException(e);
@@ -118,7 +118,7 @@
     if (!committed) {
       try {
         final Iterable<NonSerializedPartition<K>> convertedPartitions =
-            DataUtil.convertToNonSerPartitions(serializer, partitions);
+          DataUtil.convertToNonSerPartitions(serializer, partitions);
         writePartitions(convertedPartitions);
       } catch (final IOException e) {
         throw new BlockWriteException(e);
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/SerializedMemoryBlock.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/SerializedMemoryBlock.java
index c449a39..6ecc2b9 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/SerializedMemoryBlock.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/SerializedMemoryBlock.java
@@ -102,7 +102,7 @@
     if (!committed) {
       try {
         final Iterable<SerializedPartition<K>> convertedPartitions = DataUtil.convertToSerPartitions(
-            serializer, partitions);
+          serializer, partitions);
         writeSerializedPartitions(convertedPartitions);
       } catch (final IOException e) {
         throw new BlockWriteException(e);
@@ -192,7 +192,7 @@
         final long partitionSize = serializedPartition.getLength();
         if (partitionSizes.containsKey(key)) {
           partitionSizes.compute(key,
-              (existingKey, existingValue) -> existingValue + partitionSize);
+            (existingKey, existingValue) -> existingValue + partitionSize);
         } else {
           partitionSizes.put(key, partitionSize);
         }
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/FileMetadata.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/FileMetadata.java
index 2f95871..da44a52 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/FileMetadata.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/FileMetadata.java
@@ -29,6 +29,7 @@
  * This class represents a metadata for a {@link org.apache.nemo.runtime.executor.data.block.Block}.
  * The writer and reader determine the status of a file block
  * (such as accessibility, how many bytes are written, etc.) by using this metadata.
+ *
  * @param <K> the key type of its partitions.
  */
 public abstract class FileMetadata<K extends Serializable> {
@@ -48,6 +49,7 @@
 
   /**
    * Construct a file metadata with existing partition metadata.
+   *
    * @param partitionMetadataList the partition metadata list.
    */
   public FileMetadata(final List<PartitionMetadata<K>> partitionMetadataList) {
@@ -59,7 +61,7 @@
   /**
    * Writes the metadata for a partition.
    *
-   * @param key     the key of the partition.
+   * @param key           the key of the partition.
    * @param partitionSize the size of the partition.
    * @throws IOException if fail to append the partition metadata.
    */
@@ -70,7 +72,7 @@
     }
 
     final PartitionMetadata partitionMetadata =
-        new PartitionMetadata(key, partitionSize, writtenBytesCursor);
+      new PartitionMetadata(key, partitionSize, writtenBytesCursor);
     partitionMetadataList.add(partitionMetadata);
     writtenBytesCursor += partitionSize;
   }
@@ -101,6 +103,7 @@
 
   /**
    * Set the commit value.
+   *
    * @param committed whether this block is committed or not.
    */
   protected final void setCommitted(final boolean committed) {
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/LocalFileMetadata.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/LocalFileMetadata.java
index 9c6278d..96e138e 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/LocalFileMetadata.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/LocalFileMetadata.java
@@ -24,6 +24,7 @@
 /**
  * This class represents a metadata for a local file {@link org.apache.nemo.runtime.executor.data.block.Block}.
  * It resides in local only, and does not synchronize globally.
+ *
  * @param <K> the key type of its partitions.
  */
 @ThreadSafe
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/PartitionMetadata.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/PartitionMetadata.java
index 85f7f16..43580d6 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/PartitionMetadata.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/PartitionMetadata.java
@@ -22,6 +22,7 @@
 
 /**
  * This class represents a metadata for a partition.
+ *
  * @param <K> the key type of its partitions.
  */
 public final class PartitionMetadata<K extends Serializable> {
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/RemoteFileMetadata.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/RemoteFileMetadata.java
index b54361d..cbce86b 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/RemoteFileMetadata.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/RemoteFileMetadata.java
@@ -32,6 +32,7 @@
  * Because the data is stored in a remote file and globally accessed by multiple nodes,
  * each read, or deletion for a block needs one instance of this metadata.
  * The metadata is store in and read from a file (after a remote file block is committed).
+ *
  * @param <K> the key type of its partitions.
  */
 @ThreadSafe
@@ -77,8 +78,8 @@
   public synchronized void commitBlock() throws IOException {
     final Iterable<PartitionMetadata<K>> partitionMetadataItr = getPartitionMetadataList();
     try (
-        final FileOutputStream metafileOutputStream = new FileOutputStream(metaFilePath, false);
-        final DataOutputStream dataOutputStream = new DataOutputStream(metafileOutputStream)
+      final FileOutputStream metafileOutputStream = new FileOutputStream(metaFilePath, false);
+      final DataOutputStream dataOutputStream = new DataOutputStream(metafileOutputStream)
     ) {
       for (PartitionMetadata<K> partitionMetadata : partitionMetadataItr) {
         final byte[] key = SerializationUtils.serialize(partitionMetadata.getKey());
@@ -116,8 +117,8 @@
     }
     final List<PartitionMetadata<T>> partitionMetadataList = new ArrayList<>();
     try (
-        final FileInputStream metafileInputStream = new FileInputStream(metaFilePath);
-        final DataInputStream dataInputStream = new DataInputStream(metafileInputStream)
+      final FileInputStream metafileInputStream = new FileInputStream(metaFilePath);
+      final DataInputStream dataInputStream = new DataInputStream(metafileInputStream)
     ) {
       while (dataInputStream.available() > 0) {
         final int keyLength = dataInputStream.readInt();
@@ -127,9 +128,9 @@
         }
 
         final PartitionMetadata<T> partitionMetadata = new PartitionMetadata<>(
-            SerializationUtils.deserialize(desKey),
-            dataInputStream.readInt(),
-            dataInputStream.readLong()
+          SerializationUtils.deserialize(desKey),
+          dataInputStream.readInt(),
+          dataInputStream.readLong()
         );
         partitionMetadataList.add(partitionMetadata);
       }
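> Reviewer note: `RemoteFileMetadata` above persists each partition's metadata as a length-prefixed key followed by a size `int` and an offset `long`, and the reader loops until the metadata file is exhausted. A round-trip sketch of that record layout:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

/** Sketch of the (keyLength, keyBytes, size, offset) record layout for partition metadata. */
final class MetadataCodec {
  static byte[] writeRecord(final byte[] key, final int size, final long offset) throws IOException {
    final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (DataOutputStream out = new DataOutputStream(bytes)) {
      out.writeInt(key.length);  // length prefix tells the reader how many key bytes follow
      out.write(key);
      out.writeInt(size);        // partition size in bytes
      out.writeLong(offset);     // partition offset within the data file
    }
    return bytes.toByteArray();
  }

  static void readRecords(final byte[] data) throws IOException {
    try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(data))) {
      while (in.available() > 0) {          // same termination test as in the diff above
        final byte[] key = new byte[in.readInt()];
        if (in.read(key) != key.length) {
          throw new IOException("Invalid key length!");
        }
        System.out.printf("size=%d, offset=%d%n", in.readInt(), in.readLong());
      }
    }
  }
}
```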
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/partition/NonSerializedPartition.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/partition/NonSerializedPartition.java
index 8fa30b7..cd2bfb9 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/partition/NonSerializedPartition.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/partition/NonSerializedPartition.java
@@ -27,6 +27,7 @@
 /**
  * A collection of data elements. The data is stored as an iterable of elements.
  * This is a unit of read / write towards {@link org.apache.nemo.runtime.executor.data.block.Block}s.
+ *
  * @param <K> the key type of its partitions.
  */
 public final class NonSerializedPartition<K> implements Partition<Iterable, K> {
@@ -96,7 +97,7 @@
   /**
    * @return the number of bytes in serialized form (which is, for example, encoded and compressed)
    * @throws org.apache.nemo.runtime.executor.data.DataUtil.IteratorWithNumBytes.NumBytesNotSupportedException
-   *         when then information is not available
+   * when the information is not available
    */
   public long getNumSerializedBytes() throws DataUtil.IteratorWithNumBytes.NumBytesNotSupportedException {
     if (numSerializedBytes == -1) {
@@ -108,7 +109,7 @@
   /**
    * @return the number of bytes in encoded form (which is ready to be decoded)
    * @throws org.apache.nemo.runtime.executor.data.DataUtil.IteratorWithNumBytes.NumBytesNotSupportedException
-   *         when then information is not available
+   * when the information is not available
    */
   public long getNumEncodedBytes() throws DataUtil.IteratorWithNumBytes.NumBytesNotSupportedException {
     if (numEncodedBytes == -1) {
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/partition/Partition.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/partition/Partition.java
index 7eff603..4f96ad8 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/partition/Partition.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/partition/Partition.java
@@ -23,6 +23,7 @@
 /**
  * A collection of data elements.
  * This is a unit of read / write towards {@link org.apache.nemo.runtime.executor.data.block.Block}s.
+ *
  * @param <T> the type of the data stored in this {@link Partition}.
  * @param <K> the type of key used for {@link Partition}.
  */
@@ -38,6 +39,7 @@
 
   /**
    * Commits a partition to prevent further data write.
+   *
    * @throws IOException if fail to commit partition.
    */
   void commit() throws IOException;
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/partition/SerializedPartition.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/partition/SerializedPartition.java
index 1bcde51..e2f57a5 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/partition/SerializedPartition.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/partition/SerializedPartition.java
@@ -33,6 +33,7 @@
 /**
  * A collection of data elements. The data is stored as an array of bytes.
  * This is a unit of read / write towards {@link org.apache.nemo.runtime.executor.data.block.Block}s.
+ *
  * @param <K> the key type of its partitions.
  */
 public final class SerializedPartition<K> implements Partition<byte[], K> {
@@ -43,9 +44,12 @@
   private volatile int length;
   private volatile boolean committed;
   // Will be null when the partition is committed when it is constructed.
-  @Nullable private final DirectByteArrayOutputStream bytesOutputStream;
-  @Nullable private final OutputStream wrappedStream;
-  @Nullable private final EncoderFactory.Encoder encoder;
+  @Nullable
+  private final DirectByteArrayOutputStream bytesOutputStream;
+  @Nullable
+  private final OutputStream wrappedStream;
+  @Nullable
+  private final EncoderFactory.Encoder encoder;
 
   /**
    * Creates a serialized {@link Partition} without actual data.
@@ -107,6 +111,7 @@
 
   /**
    * Commits a partition to prevent further data write.
+   *
    * @throws IOException if fail to commit partition.
    */
   @Override
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/AbstractBlockStore.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/AbstractBlockStore.java
index 1011568..170d98c 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/AbstractBlockStore.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/AbstractBlockStore.java
@@ -31,6 +31,7 @@
 
   /**
    * Constructor.
+   *
    * @param serializerManager the coder manager.
    */
   protected AbstractBlockStore(final SerializerManager serializerManager) {
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/GlusterFileStore.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/GlusterFileStore.java
index a47afea..2077a05 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/GlusterFileStore.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/GlusterFileStore.java
@@ -19,13 +19,14 @@
 package org.apache.nemo.runtime.executor.data.stores;
 
 import org.apache.nemo.common.exception.BlockFetchException;
-import org.apache.nemo.conf.JobConf;
 import org.apache.nemo.common.exception.BlockWriteException;
-import org.apache.nemo.runtime.executor.data.*;
+import org.apache.nemo.conf.JobConf;
+import org.apache.nemo.runtime.executor.data.DataUtil;
+import org.apache.nemo.runtime.executor.data.SerializerManager;
 import org.apache.nemo.runtime.executor.data.block.Block;
-import org.apache.nemo.runtime.executor.data.streamchainer.Serializer;
-import org.apache.nemo.runtime.executor.data.metadata.RemoteFileMetadata;
 import org.apache.nemo.runtime.executor.data.block.FileBlock;
+import org.apache.nemo.runtime.executor.data.metadata.RemoteFileMetadata;
+import org.apache.nemo.runtime.executor.data.streamchainer.Serializer;
 import org.apache.reef.tang.annotations.Parameter;
 
 import javax.annotation.concurrent.ThreadSafe;
@@ -68,7 +69,7 @@
     final Serializer serializer = getSerializerFromWorker(blockId);
     final String filePath = DataUtil.blockIdToFilePath(blockId, fileDirectory);
     final RemoteFileMetadata metadata =
-        RemoteFileMetadata.create(DataUtil.blockIdToMetaFilePath(blockId, fileDirectory));
+      RemoteFileMetadata.create(DataUtil.blockIdToMetaFilePath(blockId, fileDirectory));
     return new FileBlock<>(blockId, serializer, filePath, metadata);
   }
 
@@ -82,7 +83,7 @@
   public void writeBlock(final Block block) throws BlockWriteException {
     if (!(block instanceof FileBlock)) {
       throw new BlockWriteException(new Throwable(
-          this.toString() + " only accept " + FileBlock.class.getName()));
+        this.toString() + " only accept " + FileBlock.class.getName()));
     } else if (!block.isCommitted()) {
       throw new BlockWriteException(new Throwable("The block " + block.getId() + "is not committed yet."));
     }
@@ -149,7 +150,7 @@
     final Serializer serializer = getSerializerFromWorker(blockId);
     final String filePath = DataUtil.blockIdToFilePath(blockId, fileDirectory);
     final RemoteFileMetadata<K> metadata =
-        RemoteFileMetadata.open(DataUtil.blockIdToMetaFilePath(blockId, fileDirectory));
+      RemoteFileMetadata.open(DataUtil.blockIdToMetaFilePath(blockId, fileDirectory));
     return new FileBlock<>(blockId, serializer, filePath, metadata);
   }
 }
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/LocalFileStore.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/LocalFileStore.java
index 62758aa..0b22dae 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/LocalFileStore.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/LocalFileStore.java
@@ -21,16 +21,18 @@
 import org.apache.nemo.common.exception.BlockFetchException;
 import org.apache.nemo.common.exception.BlockWriteException;
 import org.apache.nemo.conf.JobConf;
-import org.apache.nemo.runtime.executor.data.*;
+import org.apache.nemo.runtime.executor.data.DataUtil;
+import org.apache.nemo.runtime.executor.data.SerializerManager;
 import org.apache.nemo.runtime.executor.data.block.Block;
-import org.apache.nemo.runtime.executor.data.streamchainer.Serializer;
-import org.apache.nemo.runtime.executor.data.metadata.LocalFileMetadata;
 import org.apache.nemo.runtime.executor.data.block.FileBlock;
+import org.apache.nemo.runtime.executor.data.metadata.LocalFileMetadata;
+import org.apache.nemo.runtime.executor.data.streamchainer.Serializer;
 import org.apache.reef.tang.annotations.Parameter;
 
 import javax.annotation.concurrent.ThreadSafe;
 import javax.inject.Inject;
-import java.io.*;
+import java.io.File;
+import java.io.IOException;
 
 /**
  * Stores blocks in local files.
@@ -42,8 +44,8 @@
   /**
    * Constructor.
    *
-   * @param fileDirectory the directory which will contain the files.
-   * @param serializerManager  the serializer manager.
+   * @param fileDirectory     the directory which will contain the files.
+   * @param serializerManager the serializer manager.
    */
   @Inject
   private LocalFileStore(@Parameter(JobConf.FileDirectory.class) final String fileDirectory,
@@ -73,7 +75,7 @@
   public void writeBlock(final Block block) throws BlockWriteException {
     if (!(block instanceof FileBlock)) {
       throw new BlockWriteException(new Throwable(
-          this.toString() + "only accept " + FileBlock.class.getName()));
+        this.toString() + "only accept " + FileBlock.class.getName()));
     } else if (!block.isCommitted()) {
       throw new BlockWriteException(new Throwable("The block " + block.getId() + "is not committed yet."));
     } else {
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/MemoryStore.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/MemoryStore.java
index 9a55bd7..00346c4 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/MemoryStore.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/MemoryStore.java
@@ -63,7 +63,7 @@
   public void writeBlock(final Block block) throws BlockWriteException {
     if (!(block instanceof NonSerializedMemoryBlock)) {
       throw new BlockWriteException(new Throwable(
-          this.toString() + "only accept " + NonSerializedPartition.class.getName()));
+        this.toString() + "only accept " + NonSerializedPartition.class.getName()));
     } else if (!block.isCommitted()) {
       throw new BlockWriteException(new Throwable("The block " + block.getId() + "is not committed yet."));
     } else {
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/SerializedMemoryStore.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/SerializedMemoryStore.java
index 18c1389..f5298be 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/SerializedMemoryStore.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/stores/SerializedMemoryStore.java
@@ -35,6 +35,7 @@
 
   /**
    * Constructor.
+   *
    * @param serializerManager the serializer manager.
    */
   @Inject
@@ -61,7 +62,7 @@
   public void writeBlock(final Block block) throws BlockWriteException {
     if (!(block instanceof SerializedMemoryBlock)) {
       throw new BlockWriteException(new Throwable(
-          this.toString() + "only accept " + SerializedMemoryBlock.class.getName()));
+        this.toString() + "only accept " + SerializedMemoryBlock.class.getName()));
     } else if (!block.isCommitted()) {
       throw new BlockWriteException(new Throwable("The block " + block.getId() + "is not committed yet."));
     } else {
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/streamchainer/CompressionStreamChainer.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/streamchainer/CompressionStreamChainer.java
index b1d5d48..d879667 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/streamchainer/CompressionStreamChainer.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/streamchainer/CompressionStreamChainer.java
@@ -18,9 +18,9 @@
  */
 package org.apache.nemo.runtime.executor.data.streamchainer;
 
+import net.jpountz.lz4.LZ4BlockOutputStream;
 import org.apache.nemo.common.exception.UnsupportedCompressionException;
 import org.apache.nemo.common.ir.edge.executionproperty.CompressionProperty;
-import net.jpountz.lz4.LZ4BlockOutputStream;
 
 import java.io.IOException;
 import java.io.OutputStream;
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/streamchainer/DecompressionStreamChainer.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/streamchainer/DecompressionStreamChainer.java
index 03610ef..6bc4b62 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/streamchainer/DecompressionStreamChainer.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/streamchainer/DecompressionStreamChainer.java
@@ -18,9 +18,9 @@
  */
 package org.apache.nemo.runtime.executor.data.streamchainer;
 
+import net.jpountz.lz4.LZ4BlockInputStream;
 import org.apache.nemo.common.exception.UnsupportedCompressionException;
 import org.apache.nemo.common.ir.edge.executionproperty.CompressionProperty;
-import net.jpountz.lz4.LZ4BlockInputStream;
 
 import java.io.IOException;
 import java.io.InputStream;
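> Reviewer note: both chainers above do one thing each: wrap a raw stream with lz4-java's `LZ4BlockOutputStream` or `LZ4BlockInputStream`. A round-trip sketch of what a single compression chainer contributes:

```java
import net.jpountz.lz4.LZ4BlockInputStream;
import net.jpountz.lz4.LZ4BlockOutputStream;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

/** Sketch: compress on the way out, decompress on the way back in. */
final class Lz4RoundTrip {
  public static void main(final String[] args) throws IOException {
    final ByteArrayOutputStream sink = new ByteArrayOutputStream();
    try (OutputStream compressed = new LZ4BlockOutputStream(sink)) {
      compressed.write("some partition bytes".getBytes("UTF-8"));
    }  // closing writes the final LZ4 block

    try (InputStream decompressed =
           new LZ4BlockInputStream(new ByteArrayInputStream(sink.toByteArray()))) {
      final byte[] buf = new byte[64];
      final int n = decompressed.read(buf);
      System.out.println(new String(buf, 0, n, "UTF-8"));  // prints the original bytes
    }
  }
}
```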
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/streamchainer/Serializer.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/streamchainer/Serializer.java
index 8ec373a..3d1b46f 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/streamchainer/Serializer.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/streamchainer/Serializer.java
@@ -25,6 +25,7 @@
 
 /**
  * class that contains {@link EncoderFactory}, {@link DecoderFactory} and {@link List} of {@link EncodeStreamChainer}.
+ *
  * @param <E> encoderFactory element type.
  * @param <D> decoderFactory element type.
  */
@@ -37,8 +38,8 @@
   /**
    * Constructor.
    *
-   * @param encoderFactory              {@link EncoderFactory}.
-   * @param decoderFactory              {@link DecoderFactory}.
+   * @param encoderFactory       {@link EncoderFactory}.
+   * @param decoderFactory       {@link DecoderFactory}.
    * @param encodeStreamChainers the list of {@link EncodeStreamChainer} to use for encoding.
    * @param decodeStreamChainers the list of {@link DecodeStreamChainer} to use for decoding.
    */
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/BlockInputReader.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/BlockInputReader.java
index 8a0dadd..4640b03 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/BlockInputReader.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/BlockInputReader.java
@@ -22,7 +22,10 @@
 import org.apache.nemo.common.KeyRange;
 import org.apache.nemo.common.exception.BlockFetchException;
 import org.apache.nemo.common.exception.UnsupportedCommPatternException;
-import org.apache.nemo.common.ir.edge.executionproperty.*;
+import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
+import org.apache.nemo.common.ir.edge.executionproperty.DataStoreProperty;
+import org.apache.nemo.common.ir.edge.executionproperty.DuplicateEdgeGroupProperty;
+import org.apache.nemo.common.ir.edge.executionproperty.DuplicateEdgeGroupPropertyValue;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.plan.RuntimeEdge;
@@ -30,7 +33,10 @@
 import org.apache.nemo.runtime.executor.data.BlockManagerWorker;
 import org.apache.nemo.runtime.executor.data.DataUtil;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
 import java.util.concurrent.CompletableFuture;
 
 /**
@@ -80,6 +86,7 @@
 
   /**
    * See {@link RuntimeIdManager#generateBlockIdWildcard(String, int)} for information on block wildcards.
+   *
    * @param producerTaskIndex to use.
    * @return wildcard block id that corresponds to "ANY" task attempt of the task index.
    */
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/BlockOutputWriter.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/BlockOutputWriter.java
index 198cfd8..1fb4f46 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/BlockOutputWriter.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/BlockOutputWriter.java
@@ -21,13 +21,14 @@
 import org.apache.nemo.common.ir.edge.executionproperty.*;
 import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.vertex.executionproperty.ParallelismProperty;
+import org.apache.nemo.common.partitioner.DedicatedKeyPerElement;
+import org.apache.nemo.common.partitioner.Partitioner;
 import org.apache.nemo.common.punctuation.Watermark;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.plan.RuntimeEdge;
 import org.apache.nemo.runtime.common.plan.StageEdge;
 import org.apache.nemo.runtime.executor.data.BlockManagerWorker;
 import org.apache.nemo.runtime.executor.data.block.Block;
-import org.apache.nemo.common.partitioner.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -54,10 +55,10 @@
   /**
    * Constructor.
    *
-   * @param srcTaskId           the id of the source task.
-   * @param dstIrVertex         the destination IR vertex.
-   * @param runtimeEdge         the {@link RuntimeEdge}.
-   * @param blockManagerWorker  the {@link BlockManagerWorker}.
+   * @param srcTaskId          the id of the source task.
+   * @param dstIrVertex        the destination IR vertex.
+   * @param runtimeEdge        the {@link RuntimeEdge}.
+   * @param blockManagerWorker the {@link BlockManagerWorker}.
    */
   BlockOutputWriter(final String srcTaskId,
                     final IRVertex dstIrVertex,
@@ -72,12 +73,12 @@
     this.blockStoreValue = runtimeEdge.getPropertyValue(DataStoreProperty.class)
       .orElseThrow(() -> new RuntimeException("No data store property on the edge"));
     blockToWrite = blockManagerWorker.createBlock(
-        RuntimeIdManager.generateBlockId(runtimeEdge.getId(), srcTaskId), blockStoreValue);
+      RuntimeIdManager.generateBlockId(runtimeEdge.getId(), srcTaskId), blockStoreValue);
     final Optional<DuplicateEdgeGroupPropertyValue> duplicateDataProperty =
-        runtimeEdge.getPropertyValue(DuplicateEdgeGroupProperty.class);
+      runtimeEdge.getPropertyValue(DuplicateEdgeGroupProperty.class);
     nonDummyBlock = !duplicateDataProperty.isPresent()
-        || duplicateDataProperty.get().getRepresentativeEdgeId().equals(runtimeEdge.getId())
-        || duplicateDataProperty.get().getGroupSize() <= 1;
+      || duplicateDataProperty.get().getRepresentativeEdgeId().equals(runtimeEdge.getId())
+      || duplicateDataProperty.get().getGroupSize() <= 1;
   }
 
   @Override
@@ -86,7 +87,7 @@
       blockToWrite.write(partitioner.partition(element), element);
 
       final DedicatedKeyPerElement dedicatedKeyPerElement =
-          partitioner.getClass().getAnnotation(DedicatedKeyPerElement.class);
+        partitioner.getClass().getAnnotation(DedicatedKeyPerElement.class);
       if (dedicatedKeyPerElement != null) {
         blockToWrite.commitPartitions();
       }
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/DataFetcherOutputCollector.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/DataFetcherOutputCollector.java
index 9995e6a..2012dee 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/DataFetcherOutputCollector.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/DataFetcherOutputCollector.java
@@ -26,6 +26,7 @@
 
 /**
  * This collector receives data from DataFetcher and forwards it to the next operator.
+ *
  * @param <O> output type.
  */
 public final class DataFetcherOutputCollector<O> implements OutputCollector<O> {
@@ -36,9 +37,10 @@
 
   /**
    * It forwards output to the next operator.
+   *
    * @param nextOperatorVertex next operator to emit data and watermark
-   * @param edgeIndex edge index
-   * @param watermarkManager watermark manager
+   * @param edgeIndex          edge index
+   * @param watermarkManager   watermark manager
    */
   public DataFetcherOutputCollector(final OperatorVertex nextOperatorVertex,
                                     final int edgeIndex,
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/InputReader.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/InputReader.java
index e6f7657..356d7f3 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/InputReader.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/InputReader.java
@@ -22,7 +22,7 @@
 import org.apache.nemo.common.ir.vertex.executionproperty.ParallelismProperty;
 import org.apache.nemo.runtime.executor.data.DataUtil;
 
-import java.util.*;
+import java.util.List;
 import java.util.concurrent.CompletableFuture;
 
 /**
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/InputWatermarkManager.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/InputWatermarkManager.java
index adbb659..479763b 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/InputWatermarkManager.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/InputWatermarkManager.java
@@ -35,12 +35,13 @@
    * if multiple threads access this method concurrently.
    * Ex)
    * -- input stream1 (edge 1):  ---------- ts: 3 ------------------ts: 6
-   *                                                                 ^^^
-   *                                                              emit ts: 4 (edge 2) watermark at this time
+   * ^^^
+   * emit ts: 4 (edge 2) watermark at this time
    * -- input stream2 (edge 2):  ----------------- ts: 4------
-   *                                                 ^^^
-   *                                             emit ts: 3 (edge 1) watermark at this time
+   * ^^^
+   * emit ts: 3 (edge 1) watermark at this time
    * -- input stream3 (edge 3):  ------- ts: 5 ---------------
+   *
    * @param edgeIndex incoming edge index
    * @param watermark watermark emitted from the edge
    */
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/MultiInputWatermarkManager.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/MultiInputWatermarkManager.java
index 0402fd4..d4c0b93 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/MultiInputWatermarkManager.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/MultiInputWatermarkManager.java
@@ -35,6 +35,7 @@
   private final List<Watermark> watermarks;
   private final OutputCollector<?> watermarkCollector;
   private int minWatermarkIndex;
+
   public MultiInputWatermarkManager(final int numEdges,
                                     final OutputCollector<?> watermarkCollector) {
     super();
@@ -71,7 +72,7 @@
       // update min watermark
       final Watermark prevMinWatermark = watermarks.get(minWatermarkIndex);
       watermarks.set(minWatermarkIndex, watermark);
-       // find min watermark
+      // find min watermark
       minWatermarkIndex = findNextMinWatermarkIndex();
       final Watermark minWatermark = watermarks.get(minWatermarkIndex);
 
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/NemoEventEncoderFactory.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/NemoEventEncoderFactory.java
index d9b0836..1c7ec99 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/NemoEventEncoderFactory.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/NemoEventEncoderFactory.java
@@ -53,6 +53,7 @@
 
   /**
    * This encodes normal data and WatermarkWithIndex.
+   *
    * @param <T>
    */
   private final class NemoEventEncoder<T> implements EncoderFactory.Encoder<T> {
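
This inner class pairs with a decoder on the receiving side: both normal data and `WatermarkWithIndex` travel through the same byte stream, so the encoder must mark which one it is writing. A sketch of one way to do that with a one-byte tag; the tag values, the `WatermarkStandIn` type, and the use of `writeUTF` are illustrative assumptions, not Nemo's actual wire format:

```java
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// A watermark stand-in; Nemo's actual type is WatermarkWithIndex.
final class WatermarkStandIn {
  final long timestamp;

  WatermarkStandIn(final long timestamp) {
    this.timestamp = timestamp;
  }
}

// Sketch of a tagged encoder that distinguishes watermarks from normal
// data with a one-byte header. Tag values and payload encoding are
// illustrative, not Nemo's actual wire format.
final class TaggedEncoderSketch {
  private static final byte DATA = 0;
  private static final byte WATERMARK = 1;

  static byte[] encode(final Object element) throws IOException {
    final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (DataOutputStream out = new DataOutputStream(bytes)) {
      if (element instanceof WatermarkStandIn) {
        out.writeByte(WATERMARK);
        out.writeLong(((WatermarkStandIn) element).timestamp);
      } else {
        out.writeByte(DATA);
        out.writeUTF(element.toString());  // stand-in for the data coder
      }
    }
    return bytes.toByteArray();
  }
}
```
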
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/NextIntraTaskOperatorInfo.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/NextIntraTaskOperatorInfo.java
index 1525261..233bb73 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/NextIntraTaskOperatorInfo.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/NextIntraTaskOperatorInfo.java
@@ -25,7 +25,7 @@
  * -- edgeIndex: the index of edge to next operator.
  * -- nextOperator: next operator vertex
  * -- watermarkManager: next operator's watermark manager
- *
+ * <p>
  * ex)
  * --edge (index 0)--&gt;
  * --edge (index 1)--&gt;  watermarkManager --&gt; nextOperator
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OperatorVertexOutputCollector.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OperatorVertexOutputCollector.java
index b1775f7..ca5a2bc 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OperatorVertexOutputCollector.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OperatorVertexOutputCollector.java
@@ -25,7 +25,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.List;
+import java.util.Map;
 
 /**
  * OutputCollector implementation.
@@ -48,10 +49,11 @@
 
   /**
    * Constructor of the output collector.
-   * @param irVertex the ir vertex that emits the output
-   * @param internalMainOutputs internal main outputs
+   *
+   * @param irVertex                  the ir vertex that emits the output
+   * @param internalMainOutputs       internal main outputs
    * @param internalAdditionalOutputs internal additional outputs
-   * @param externalMainOutputs external main outputs
+   * @param externalMainOutputs       external main outputs
    * @param externalAdditionalOutputs external additional outputs
    */
   public OperatorVertexOutputCollector(
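
The four constructor parameters in this hunk describe one routing table: untagged emits fan out to the main outputs, while emits carrying an additional-output tag reach only the outputs registered under that tag. A minimal sketch of that routing, with `Consumer<Object>` standing in for Nemo's writer and operator types:

```java
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;

// Sketch of tag-based output routing: untagged emits reach every main
// output, tagged emits reach only the outputs registered for that tag.
// Consumer<Object> stands in for Nemo's writers and operators.
final class TagRoutingCollectorSketch {
  private final List<Consumer<Object>> mainOutputs;
  private final Map<String, List<Consumer<Object>>> additionalOutputs;

  TagRoutingCollectorSketch(final List<Consumer<Object>> mainOutputs,
                            final Map<String, List<Consumer<Object>>> additionalOutputs) {
    this.mainOutputs = mainOutputs;
    this.additionalOutputs = additionalOutputs;
  }

  void emit(final Object output) {
    mainOutputs.forEach(out -> out.accept(output));
  }

  void emit(final String dstTag, final Object output) {
    additionalOutputs.getOrDefault(dstTag, Collections.emptyList())
      .forEach(out -> out.accept(output));
  }
}
```
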
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OperatorWatermarkCollector.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OperatorWatermarkCollector.java
index 66efb72..319a30f 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OperatorWatermarkCollector.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OperatorWatermarkCollector.java
@@ -23,6 +23,7 @@
 import org.apache.nemo.common.punctuation.Watermark;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 /**
  * This class is used for collecting watermarks for an OperatorVertex.
  * InputWatermarkManager emits watermarks to this class.
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OutputWriter.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OutputWriter.java
index c976cf4..79efa69 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OutputWriter.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OutputWriter.java
@@ -20,7 +20,7 @@
 
 import org.apache.nemo.common.punctuation.Watermark;
 
-import java.util.*;
+import java.util.Optional;
 
 /**
  * Represents the output data transfer from a task.
@@ -35,6 +35,7 @@
 
   /**
    * Writes watermarks to all edges.
+   *
    * @param watermark watermark
    */
   void writeWatermark(final Watermark watermark);
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/PipeOutputWriter.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/PipeOutputWriter.java
index 239efc6..004deb4 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/PipeOutputWriter.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/PipeOutputWriter.java
@@ -19,13 +19,13 @@
 package org.apache.nemo.runtime.executor.datatransfer;
 
 import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
+import org.apache.nemo.common.partitioner.Partitioner;
 import org.apache.nemo.common.punctuation.Watermark;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.plan.RuntimeEdge;
 import org.apache.nemo.runtime.common.plan.StageEdge;
 import org.apache.nemo.runtime.executor.bytetransfer.ByteOutputContext;
 import org.apache.nemo.runtime.executor.data.PipeManagerWorker;
-import org.apache.nemo.common.partitioner.Partitioner;
 import org.apache.nemo.runtime.executor.data.streamchainer.Serializer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -55,9 +55,9 @@
   /**
    * Constructor.
    *
-   * @param srcTaskId           the id of the source task.
-   * @param runtimeEdge         the {@link RuntimeEdge}.
-   * @param pipeManagerWorker   the pipe manager.
+   * @param srcTaskId         the id of the source task.
+   * @param runtimeEdge       the {@link RuntimeEdge}.
+   * @param pipeManagerWorker the pipe manager.
    */
   PipeOutputWriter(final String srcTaskId,
                    final RuntimeEdge runtimeEdge,
@@ -85,6 +85,7 @@
 
   /**
    * Writes output element.
+   *
    * @param element the element to write.
    */
   @Override
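
`PipeOutputWriter.writeElement` relies on the `Partitioner` imported above to pick a destination pipe per element. A simplified sketch of that idea, assuming a hashCode-modulo partitioner and `Queue`-backed pipes in place of Nemo's `Partitioner` and `ByteOutputContext`:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

// Sketch of a partitioned writer: a partition function decides which
// downstream pipe receives each element, while watermarks are broadcast
// to every pipe. All names are illustrative stand-ins.
final class PartitionedWriterSketch {
  private final List<Queue<Object>> pipes = new ArrayList<>();

  PartitionedWriterSketch(final int numPartitions) {
    for (int i = 0; i < numPartitions; i++) {
      pipes.add(new ConcurrentLinkedQueue<>());   // one "pipe" per destination
    }
  }

  void write(final Object element) {
    final int partition = Math.floorMod(element.hashCode(), pipes.size());
    pipes.get(partition).offer(element);          // route by partition index
  }

  void writeWatermark(final long timestamp) {
    pipes.forEach(pipe -> pipe.offer(timestamp)); // watermarks go to every pipe
  }
}
```
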
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/SingleInputWatermarkManager.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/SingleInputWatermarkManager.java
index e3562ed..6715a1b 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/SingleInputWatermarkManager.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/SingleInputWatermarkManager.java
@@ -38,6 +38,7 @@
 
   /**
    * This just forwards watermarks to the next operator because it has one data stream.
+   *
    * @param edgeIndex edge index
    * @param watermark watermark
    */
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/DataFetcher.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/DataFetcher.java
index 215a6a8..12121c9 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/DataFetcher.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/DataFetcher.java
@@ -32,7 +32,7 @@
   private final OutputCollector outputCollector;
 
   /**
-   * @param dataSource to fetch from.
+   * @param dataSource      to fetch from.
    * @param outputCollector for the data fetched.
    */
   DataFetcher(final IRVertex dataSource,
@@ -43,8 +43,9 @@
 
   /**
    * Can block until the next data element becomes available.
+   *
    * @return data element
-   * @throws IOException upon I/O error
+   * @throws IOException                      upon I/O error
    * @throws java.util.NoSuchElementException if no more element is available
    */
   abstract Object fetchDataElement() throws IOException, NoSuchElementException;
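
The contract in this javadoc matters to callers: `NoSuchElementException` means nothing is available right now, not a failure, so the caller sets the fetcher aside and retries later. A sketch of a caller loop honoring that contract (the `Fetcher` interface is a hypothetical stand-in for `DataFetcher`):

```java
import java.io.IOException;
import java.util.NoSuchElementException;

// Hypothetical stand-in for DataFetcher.
interface Fetcher {
  Object fetchDataElement() throws IOException, NoSuchElementException;
}

final class FetchLoopSketch {
  static void drain(final Fetcher fetcher) {
    while (true) {
      try {
        final Object element = fetcher.fetchDataElement();
        System.out.println("got: " + element);  // hand off to processing
      } catch (final NoSuchElementException e) {
        return;                                 // nothing available now; retry later
      } catch (final IOException e) {
        throw new RuntimeException(e);          // I/O failure is fatal here
      }
    }
  }
}
```
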
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/MultiThreadParentTaskDataFetcher.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/MultiThreadParentTaskDataFetcher.java
index 7ce1ed9..6bde9a2 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/MultiThreadParentTaskDataFetcher.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/MultiThreadParentTaskDataFetcher.java
@@ -31,14 +31,17 @@
 import java.io.IOException;
 import java.util.List;
 import java.util.NoSuchElementException;
-import java.util.concurrent.*;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
 
 /**
  * Task thread -> fetchDataElement() -> (((QUEUE))) <- List of iterators <- queueInsertionThreads
- *
+ * <p>
  * Unlike {@link ParentTaskDataFetcher}, where the task thread directly consumes (and blocks on) iterators one by one,
  * this class spawns threads that each forwards elements from an iterator to a global queue.
- *
+ * <p>
  * This class should be used when dealing with unbounded data streams, as we do not want to be blocked on a
  * single unbounded iterator forever.
  */
@@ -180,10 +183,12 @@
     public void emit(final Object output) {
       throw new IllegalStateException("Should not be called");
     }
+
     @Override
     public void emitWatermark(final Watermark watermark) {
       elementQueue.offer(watermark);
     }
+
     @Override
     public void emit(final String dstVertexId, final Object output) {
       throw new IllegalStateException("Should not be called");
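
The ASCII diagram at the top of this file is the design in one line: one insertion thread per iterator feeds a shared queue, and the task thread only ever polls the queue, so no single unbounded iterator can block it. A self-contained sketch of that fan-in pattern with illustrative names:

```java
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Sketch of the "many producer threads, one global queue" pattern: each
// producer drains one (possibly unbounded) iterator into a shared queue,
// so the task thread never blocks on a single iterator.
final class QueueFanInSketch {
  private final ConcurrentLinkedQueue<Object> elementQueue = new ConcurrentLinkedQueue<>();
  private final ExecutorService queueInsertionThreads = Executors.newCachedThreadPool();

  void consumeInBackground(final List<Iterable<Object>> iterables) {
    for (final Iterable<Object> iterable : iterables) {
      queueInsertionThreads.execute(() -> {
        for (final Object element : iterable) {
          elementQueue.offer(element);  // forward to the global queue
        }
      });
    }
  }

  // Non-blocking fetch by the task thread; null means "nothing yet".
  Object fetchDataElement() {
    return elementQueue.poll();
  }
}
```
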
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/SourceVertexDataFetcher.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/SourceVertexDataFetcher.java
index b42bd77..9ec850a 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/SourceVertexDataFetcher.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/SourceVertexDataFetcher.java
@@ -21,8 +21,8 @@
 import org.apache.nemo.common.ir.OutputCollector;
 import org.apache.nemo.common.ir.Readable;
 import org.apache.nemo.common.ir.vertex.SourceVertex;
-import org.apache.nemo.common.punctuation.Watermark;
 import org.apache.nemo.common.punctuation.Finishmark;
+import org.apache.nemo.common.punctuation.Watermark;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -65,6 +65,7 @@
 
   /**
    * This is non-blocking operation.
+   *
    * @return current data
    * @throws NoSuchElementException if the current data is not available
    */
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/TaskExecutor.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/TaskExecutor.java
index 6dee5cf..18a02de 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/TaskExecutor.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/TaskExecutor.java
@@ -19,44 +19,41 @@
 package org.apache.nemo.runtime.executor.task;
 
 import com.google.common.collect.Lists;
+import org.apache.commons.lang3.SerializationUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.nemo.common.Pair;
 import org.apache.nemo.common.dag.DAG;
 import org.apache.nemo.common.dag.Edge;
 import org.apache.nemo.common.ir.OutputCollector;
 import org.apache.nemo.common.ir.Readable;
 import org.apache.nemo.common.ir.edge.executionproperty.AdditionalOutputTagProperty;
-import org.apache.nemo.common.ir.vertex.*;
+import org.apache.nemo.common.ir.vertex.IRVertex;
+import org.apache.nemo.common.ir.vertex.OperatorVertex;
+import org.apache.nemo.common.ir.vertex.SourceVertex;
 import org.apache.nemo.common.ir.vertex.transform.MessageAggregatorTransform;
 import org.apache.nemo.common.ir.vertex.transform.Transform;
-import org.apache.nemo.common.punctuation.Watermark;
-import org.apache.nemo.runtime.executor.datatransfer.MultiInputWatermarkManager;
 import org.apache.nemo.common.punctuation.Finishmark;
+import org.apache.nemo.common.punctuation.Watermark;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.runtime.common.message.MessageEnvironment;
 import org.apache.nemo.runtime.common.message.PersistentConnectionToMasterMap;
-import org.apache.nemo.runtime.common.plan.Task;
-import org.apache.nemo.runtime.common.plan.StageEdge;
 import org.apache.nemo.runtime.common.plan.RuntimeEdge;
+import org.apache.nemo.runtime.common.plan.StageEdge;
+import org.apache.nemo.runtime.common.plan.Task;
 import org.apache.nemo.runtime.common.state.TaskState;
 import org.apache.nemo.runtime.executor.MetricMessageSender;
 import org.apache.nemo.runtime.executor.TaskStateManager;
 import org.apache.nemo.runtime.executor.TransformContextImpl;
 import org.apache.nemo.runtime.executor.data.BroadcastManagerWorker;
 import org.apache.nemo.runtime.executor.datatransfer.*;
-
-import java.io.IOException;
-import java.util.*;
-import java.util.stream.Collectors;
-
-import org.apache.commons.lang3.SerializationUtils;
-import org.apache.commons.lang3.exception.ExceptionUtils;
-import org.apache.nemo.runtime.executor.datatransfer.RunTimeMessageOutputCollector;
-import org.apache.nemo.runtime.executor.datatransfer.OperatorVertexOutputCollector;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import javax.annotation.concurrent.NotThreadSafe;
+import java.io.IOException;
+import java.util.*;
+import java.util.stream.Collectors;
 
 /**
  * Executes a task.
@@ -88,12 +85,12 @@
   /**
    * Constructor.
    *
-   * @param task                   Task with information needed during execution.
-   * @param irVertexDag            A DAG of vertices.
-   * @param taskStateManager       State manager for this Task.
-   * @param intermediateDataIOFactory    For reading from/writing to data to other tasks.
-   * @param broadcastManagerWorker For broadcasts.
-   * @param metricMessageSender    For sending metric with execution stats to the master.
+   * @param task                            Task with information needed during execution.
+   * @param irVertexDag                     A DAG of vertices.
+   * @param taskStateManager                State manager for this Task.
+   * @param intermediateDataIOFactory       For reading data from and writing data to other tasks.
+   * @param broadcastManagerWorker          For broadcasts.
+   * @param metricMessageSender             For sending metric with execution stats to the master.
    * @param persistentConnectionToMasterMap For sending messages to the master.
    */
   public TaskExecutor(final Task task,
@@ -156,8 +153,8 @@
    * This means that overheads associated with jumping from one harness to the other should be minimal.
    * For example, we should never perform an expensive hash operation to traverse the harnesses.
    *
-   * @param task        task.
-   * @param irVertexDag dag.
+   * @param task                      task.
+   * @param irVertexDag               dag.
    * @param intermediateDataIOFactory intermediate IO.
    * @return fetchers and harnesses.
    */
@@ -373,7 +370,8 @@
   /**
    * Process an event generated from the dataFetcher.
    * If the event is an instance of Finishmark, we remove the dataFetcher from the current list.
-   * @param event event
+   *
+   * @param event       event
    * @param dataFetcher current data fetcher
    */
   private void onEventFromDataFetcher(final Object event,
@@ -400,9 +398,10 @@
 
   /**
    * Check if it is time to poll pending fetchers' data.
+   *
    * @param pollingPeriod polling period
-   * @param currentTime current time
-   * @param prevTime prev time
+   * @param currentTime   current time
+   * @param prevTime      prev time
    */
   private boolean isPollingTime(final long pollingPeriod,
                                 final long currentTime,
@@ -413,15 +412,15 @@
   /**
    * This retrieves data from data fetchers and process them.
    * It maintains two lists:
-   *  -- availableFetchers: maintain data fetchers that currently have data elements to retreive
-   *  -- pendingFetchers: maintain data fetchers that currently do not have available elements.
-   *     This can become available in the future, and therefore we check the pending fetchers every pollingInterval.
-   *
-   *  If a data fetcher finishes, we remove it from the two lists.
-   *  If a data fetcher has no available element, we move the data fetcher to pendingFetchers
-   *  If a pending data fetcher has element, we move it to availableFetchers
-   *  If there are no available fetchers but pending fetchers, sleep for pollingPeriod
-   *  and retry fetching data from the pendingFetchers.
+   * -- availableFetchers: maintain data fetchers that currently have data elements to retrieve
+   * -- pendingFetchers: maintain data fetchers that currently do not have available elements.
+   * These elements can become available in the future, and therefore we check the pending fetchers every pollingInterval.
+   * <p>
+   * If a data fetcher finishes, we remove it from the two lists.
+   * If a data fetcher has no available element, we move the data fetcher to pendingFetchers
+   * If a pending data fetcher has an element, we move it to availableFetchers
+   * If there are no available fetchers but pending fetchers, sleep for pollingPeriod
+   * and retry fetching data from the pendingFetchers.
    *
    * @param fetchers to handle.
    * @return false if IOException.
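
A sketch of the two-list scheduling this javadoc describes, simplified in one respect: it re-polls every pending fetcher after each sleep instead of probing them individually. `SketchFetcher` and the `FINISH` marker are stand-ins for `DataFetcher` and `Finishmark`:

```java
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

// Sketch of the available/pending two-list loop: fetchers with data stay
// in 'available'; fetchers that throw NoSuchElementException move to
// 'pending' and are re-polled every pollingPeriod milliseconds.
final class TwoListLoopSketch {
  interface SketchFetcher {
    Object fetchDataElement() throws NoSuchElementException;  // may return FINISH
  }

  static final Object FINISH = new Object();

  static void run(final List<SketchFetcher> fetchers, final long pollingPeriod)
      throws InterruptedException {
    final List<SketchFetcher> available = new ArrayList<>(fetchers);
    final List<SketchFetcher> pending = new ArrayList<>();
    while (!available.isEmpty() || !pending.isEmpty()) {
      final Iterator<SketchFetcher> it = available.iterator();
      while (it.hasNext()) {
        final SketchFetcher fetcher = it.next();
        try {
          final Object element = fetcher.fetchDataElement();
          if (element == FINISH) {
            it.remove();                // finished: drop from both lists
          }
          // else: process the element
        } catch (final NoSuchElementException e) {
          it.remove();
          pending.add(fetcher);         // nothing now; re-check later
        }
      }
      if (available.isEmpty() && !pending.isEmpty()) {
        Thread.sleep(pollingPeriod);    // wait before re-polling
        available.addAll(pending);
        pending.clear();
      }
    }
  }
}
```
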
@@ -548,11 +547,11 @@
    * Return a map of Internal Outputs associated with their output tag.
    * If an edge has no output tag, its info are added to the mainOutputTag.
    *
-   * @param irVertex source irVertex
-   * @param irVertexDag DAG of IRVertex and RuntimeEdge
-   * @param edgeIndexMap Map of edge and index
+   * @param irVertex                    source irVertex
+   * @param irVertexDag                 DAG of IRVertex and RuntimeEdge
+   * @param edgeIndexMap                Map of edge and index
    * @param operatorWatermarkManagerMap Map of irVertex and InputWatermarkManager
-   * @return Map<OutputTag, List<NextIntraTaskOperatorInfo>>
+   * @return The map of output tag to the list of next intra-task operator information.
    */
   private Map<String, List<NextIntraTaskOperatorInfo>> getInternalOutputMap(
     final IRVertex irVertex,
@@ -565,18 +564,18 @@
     irVertexDag.getOutgoingEdgesOf(irVertex.getId())
       .stream()
       .map(edge -> {
-          final boolean isPresent = edge.getPropertyValue(AdditionalOutputTagProperty.class).isPresent();
-          final String outputTag;
-          if (isPresent) {
-            outputTag = edge.getPropertyValue(AdditionalOutputTagProperty.class).get();
-          } else {
-            outputTag = AdditionalOutputTagProperty.getMainOutputTag();
-          }
-          final int index = edgeIndexMap.get(edge);
-          final OperatorVertex nextOperator = (OperatorVertex) edge.getDst();
-          final InputWatermarkManager inputWatermarkManager = operatorWatermarkManagerMap.get(nextOperator);
-          return Pair.of(outputTag, new NextIntraTaskOperatorInfo(index, nextOperator, inputWatermarkManager));
-        })
+        final boolean isPresent = edge.getPropertyValue(AdditionalOutputTagProperty.class).isPresent();
+        final String outputTag;
+        if (isPresent) {
+          outputTag = edge.getPropertyValue(AdditionalOutputTagProperty.class).get();
+        } else {
+          outputTag = AdditionalOutputTagProperty.getMainOutputTag();
+        }
+        final int index = edgeIndexMap.get(edge);
+        final OperatorVertex nextOperator = (OperatorVertex) edge.getDst();
+        final InputWatermarkManager inputWatermarkManager = operatorWatermarkManagerMap.get(nextOperator);
+        return Pair.of(outputTag, new NextIntraTaskOperatorInfo(index, nextOperator, inputWatermarkManager));
+      })
       .forEach(pair -> {
         map.putIfAbsent(pair.left(), new ArrayList<>());
         map.get(pair.left()).add(pair.right());
@@ -588,9 +587,9 @@
   /**
    * Return inter-task OutputWriters, for single output or output associated with main tag.
    *
-   * @param irVertex                source irVertex
-   * @param outEdgesToChildrenTasks outgoing edges to child tasks
-   * @param intermediateDataIOFactory     intermediateDataIOFactory
+   * @param irVertex                  source irVertex
+   * @param outEdgesToChildrenTasks   outgoing edges to child tasks
+   * @param intermediateDataIOFactory intermediateDataIOFactory
    * @return OutputWriters for main children tasks
    */
   private List<OutputWriter> getExternalMainOutputs(final IRVertex irVertex,
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/VertexHarness.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/VertexHarness.java
index 2c54f96..113f6bf 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/VertexHarness.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/task/VertexHarness.java
@@ -25,7 +25,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.List;
+import java.util.Map;
 
 /**
  * Captures the relationship between a non-source IRVertex's outputCollector, and mainTagChildren vertices.
diff --git a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/MetricFlushTest.java b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/MetricFlushTest.java
index ee4b511..9e6c085 100644
--- a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/MetricFlushTest.java
+++ b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/MetricFlushTest.java
@@ -19,7 +19,10 @@
 package org.apache.nemo.runtime.executor;
 
 import org.apache.nemo.runtime.common.comm.ControlMessage;
-import org.apache.nemo.runtime.common.message.*;
+import org.apache.nemo.runtime.common.message.MessageContext;
+import org.apache.nemo.runtime.common.message.MessageEnvironment;
+import org.apache.nemo.runtime.common.message.MessageListener;
+import org.apache.nemo.runtime.common.message.MessageSender;
 import org.apache.nemo.runtime.common.message.local.LocalMessageDispatcher;
 import org.apache.nemo.runtime.common.message.local.LocalMessageEnvironment;
 import org.apache.nemo.runtime.master.metric.MetricManagerMaster;
@@ -67,7 +70,7 @@
     final MessageEnvironment workerMessageEnvironment = workerInjector.getInstance(MessageEnvironment.class);
 
     final MessageSender masterToWorkerSender = masterMessageEnvironment
-        .asyncConnect(WORKER, MessageEnvironment.EXECUTOR_MESSAGE_LISTENER_ID).get();
+      .asyncConnect(WORKER, MessageEnvironment.EXECUTOR_MESSAGE_LISTENER_ID).get();
 
     final Set<ExecutorRepresenter> executorRepresenterSet = new HashSet<>();
 
@@ -88,7 +91,7 @@
     final MetricManagerWorker metricManagerWorker = workerInjector.getInstance(MetricManagerWorker.class);
 
     masterMessageEnvironment.setupListener(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID,
-        new MessageListener<Object>() {
+      new MessageListener<Object>() {
         @Override
         public void onMessage(Object message) {
           latch.countDown();
@@ -97,19 +100,19 @@
         @Override
         public void onMessageWithContext(Object message, MessageContext messageContext) {
         }
-    });
+      });
 
     workerMessageEnvironment.setupListener(MessageEnvironment.EXECUTOR_MESSAGE_LISTENER_ID,
-        new MessageListener<Object>() {
-          @Override
-          public void onMessage(Object message) {
-            metricManagerWorker.flush();
-          }
+      new MessageListener<Object>() {
+        @Override
+        public void onMessage(Object message) {
+          metricManagerWorker.flush();
+        }
 
-          @Override
-          public void onMessageWithContext(Object message, MessageContext messageContext) {
-          }
-        });
+        @Override
+        public void onMessageWithContext(Object message, MessageContext messageContext) {
+        }
+      });
 
     metricManagerMaster.sendMetricFlushRequest();
 
diff --git a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/TransformContextImplTest.java b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/TransformContextImplTest.java
index ecac27e..421a2b1 100644
--- a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/TransformContextImplTest.java
+++ b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/TransformContextImplTest.java
@@ -26,9 +26,7 @@
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
diff --git a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/data/BlockStoreTest.java b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/data/BlockStoreTest.java
index 3ae2d0b..cb3797f 100644
--- a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/data/BlockStoreTest.java
+++ b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/data/BlockStoreTest.java
@@ -18,27 +18,30 @@
  */
 package org.apache.nemo.runtime.executor.data;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.nemo.common.HashRange;
+import org.apache.nemo.common.KeyRange;
 import org.apache.nemo.common.Pair;
-import org.apache.nemo.common.coder.*;
+import org.apache.nemo.common.coder.IntDecoderFactory;
+import org.apache.nemo.common.coder.IntEncoderFactory;
+import org.apache.nemo.common.coder.PairDecoderFactory;
+import org.apache.nemo.common.coder.PairEncoderFactory;
 import org.apache.nemo.common.ir.IdManager;
 import org.apache.nemo.common.ir.edge.executionproperty.CompressionProperty;
 import org.apache.nemo.conf.JobConf;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
-import org.apache.nemo.common.KeyRange;
 import org.apache.nemo.runtime.common.message.MessageEnvironment;
 import org.apache.nemo.runtime.common.message.local.LocalMessageDispatcher;
 import org.apache.nemo.runtime.common.message.local.LocalMessageEnvironment;
 import org.apache.nemo.runtime.common.state.BlockState;
 import org.apache.nemo.runtime.executor.data.block.Block;
 import org.apache.nemo.runtime.executor.data.partition.NonSerializedPartition;
-import org.apache.nemo.runtime.executor.data.streamchainer.DecompressionStreamChainer;
-import org.apache.nemo.runtime.executor.data.streamchainer.CompressionStreamChainer;
-import org.apache.nemo.runtime.executor.data.streamchainer.Serializer;
 import org.apache.nemo.runtime.executor.data.stores.*;
+import org.apache.nemo.runtime.executor.data.streamchainer.CompressionStreamChainer;
+import org.apache.nemo.runtime.executor.data.streamchainer.DecompressionStreamChainer;
+import org.apache.nemo.runtime.executor.data.streamchainer.Serializer;
 import org.apache.nemo.runtime.master.BlockManagerMaster;
 import org.apache.nemo.runtime.master.RuntimeMaster;
-import org.apache.commons.io.FileUtils;
 import org.apache.reef.tang.Injector;
 import org.apache.reef.tang.Tang;
 import org.apache.reef.tang.exceptions.InjectionException;
@@ -54,7 +57,10 @@
 import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
-import java.util.concurrent.*;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 import java.util.stream.Stream;
@@ -75,10 +81,10 @@
 public final class BlockStoreTest {
   private static final String TMP_FILE_DIRECTORY = "./tmpFiles";
   private static final Serializer SERIALIZER = new Serializer(
-      PairEncoderFactory.of(IntEncoderFactory.of(), IntEncoderFactory.of()),
-      PairDecoderFactory.of(IntDecoderFactory.of(), IntDecoderFactory.of()),
-      Collections.singletonList(new CompressionStreamChainer(CompressionProperty.Value.LZ4)),
-      Collections.singletonList(new DecompressionStreamChainer(CompressionProperty.Value.LZ4)));
+    PairEncoderFactory.of(IntEncoderFactory.of(), IntEncoderFactory.of()),
+    PairDecoderFactory.of(IntDecoderFactory.of(), IntDecoderFactory.of()),
+    Collections.singletonList(new CompressionStreamChainer(CompressionProperty.Value.LZ4)),
+    Collections.singletonList(new DecompressionStreamChainer(CompressionProperty.Value.LZ4)));
   private static final SerializerManager serializerManager = mock(SerializerManager.class);
   private BlockManagerMaster blockManagerMaster;
   private Injector baseInjector;
@@ -114,7 +120,7 @@
   public void setUp() throws Exception {
     baseInjector = LocalMessageDispatcher.getInjector();
     final Injector injector = LocalMessageEnvironment
-        .forkInjector(baseInjector, MessageEnvironment.MASTER_COMMUNICATION_ID);
+      .forkInjector(baseInjector, MessageEnvironment.MASTER_COMMUNICATION_ID);
     blockManagerMaster = injector.getInstance(BlockManagerMaster.class);
     when(serializerManager.getSerializer(any())).thenReturn(SERIALIZER);
 
@@ -142,7 +148,7 @@
       IntStream.range(0, NUM_READ_VERTICES).forEach(readTaskIdx -> {
         final int partitionsCount = writeTaskIdx * NUM_READ_VERTICES + readTaskIdx;
         partitionsForBlock.add(new NonSerializedPartition(
-            readTaskIdx, getRangedNumList(partitionsCount * DATA_SIZE, (partitionsCount + 1) * DATA_SIZE), -1, -1));
+          readTaskIdx, getRangedNumList(partitionsCount * DATA_SIZE, (partitionsCount + 1) * DATA_SIZE), -1, -1));
       });
     });
 
@@ -177,10 +183,10 @@
       final List<NonSerializedPartition<Integer>> hashedBlock = new ArrayList<>(HASH_RANGE);
       // Generates the data having each hash value.
       IntStream.range(0, HASH_RANGE).forEach(hashValue ->
-          hashedBlock.add(new NonSerializedPartition(hashValue, getFixedKeyRangedNumList(
-              hashValue,
-              writeTaskIdx * HASH_DATA_SIZE * HASH_RANGE + hashValue * HASH_DATA_SIZE,
-              writeTaskIdx * HASH_DATA_SIZE * HASH_RANGE + (hashValue + 1) * HASH_DATA_SIZE), -1, -1)));
+        hashedBlock.add(new NonSerializedPartition(hashValue, getFixedKeyRangedNumList(
+          hashValue,
+          writeTaskIdx * HASH_DATA_SIZE * HASH_RANGE + hashValue * HASH_DATA_SIZE,
+          writeTaskIdx * HASH_DATA_SIZE * HASH_RANGE + (hashValue + 1) * HASH_DATA_SIZE), -1, -1)));
       hashedBlockPartitionList.add(hashedBlock);
     });
 
@@ -189,7 +195,7 @@
     readKeyRangeList.add(HashRange.of(0, smallDataRangeEnd));
     IntStream.range(0, NUM_READ_HASH_TASKS - 1).forEach(readTaskIdx -> {
       readKeyRangeList.add(HashRange.of(smallDataRangeEnd + readTaskIdx,
-          smallDataRangeEnd + readTaskIdx + 1));
+        smallDataRangeEnd + readTaskIdx + 1));
     });
 
     // Generates the expected result of hash range retrieval for each read task.
@@ -264,9 +270,9 @@
   public void testGlusterFileStore() throws Exception {
     FileUtils.deleteDirectory(new File(TMP_FILE_DIRECTORY));
     final RemoteFileStore writerSideRemoteFileStore =
-        createGlusterFileStore("writer");
+      createGlusterFileStore("writer");
     final RemoteFileStore readerSideRemoteFileStore =
-        createGlusterFileStore("reader");
+      createGlusterFileStore("reader");
 
     shuffle(writerSideRemoteFileStore, readerSideRemoteFileStore);
     concurrentRead(writerSideRemoteFileStore, readerSideRemoteFileStore);
@@ -275,7 +281,7 @@
   }
 
   private GlusterFileStore createGlusterFileStore(final String executorId)
-      throws InjectionException {
+    throws InjectionException {
     final Injector injector = LocalMessageEnvironment.forkInjector(baseInjector, executorId);
     injector.bindVolatileParameter(JobConf.GlusterVolumeDirectory.class, TMP_FILE_DIRECTORY);
     injector.bindVolatileParameter(JobConf.JobId.class, "GFS test");
@@ -303,28 +309,28 @@
 
     // Write concurrently
     IntStream.range(0, NUM_WRITE_VERTICES).forEach(writeTaskIdx ->
-        writeFutureList.add(writeExecutor.submit(new Callable<Boolean>() {
-          @Override
-          public Boolean call() {
-            try {
-              final String blockId = blockIdList.get(writeTaskIdx);
-              final Block block = writerSideStore.createBlock(blockId);
-              for (final NonSerializedPartition<Integer> partition : partitionsPerBlock.get(writeTaskIdx)) {
-                final Iterable data = partition.getData();
-                data.forEach(element -> block.write(partition.getKey(), element));
-              }
-              block.commit();
-              writerSideStore.writeBlock(block);
-              blockManagerMaster.onProducerTaskScheduled(getTaskId(writeTaskIdx), Collections.singleton(blockId));
-              blockManagerMaster.onBlockStateChanged(blockId, BlockState.State.AVAILABLE,
-                  "Writer side of the shuffle edge");
-              return true;
-            } catch (final Exception e) {
-              e.printStackTrace();
-              return false;
+      writeFutureList.add(writeExecutor.submit(new Callable<Boolean>() {
+        @Override
+        public Boolean call() {
+          try {
+            final String blockId = blockIdList.get(writeTaskIdx);
+            final Block block = writerSideStore.createBlock(blockId);
+            for (final NonSerializedPartition<Integer> partition : partitionsPerBlock.get(writeTaskIdx)) {
+              final Iterable data = partition.getData();
+              data.forEach(element -> block.write(partition.getKey(), element));
             }
+            block.commit();
+            writerSideStore.writeBlock(block);
+            blockManagerMaster.onProducerTaskScheduled(getTaskId(writeTaskIdx), Collections.singleton(blockId));
+            blockManagerMaster.onBlockStateChanged(blockId, BlockState.State.AVAILABLE,
+              "Writer side of the shuffle edge");
+            return true;
+          } catch (final Exception e) {
+            e.printStackTrace();
+            return false;
           }
-        })));
+        }
+      })));
 
     // Wait each writer to success
     IntStream.range(0, NUM_WRITE_VERTICES).forEach(writer -> {
@@ -338,22 +344,22 @@
 
     // Read concurrently and check whether the result is equal to the input
     IntStream.range(0, NUM_READ_VERTICES).forEach(readTaskIdx ->
-        readFutureList.add(readExecutor.submit(new Callable<Boolean>() {
-          @Override
-          public Boolean call() {
-            try {
-              for (int writeTaskIdx = 0; writeTaskIdx < NUM_WRITE_VERTICES; writeTaskIdx++) {
-                readResultCheck(blockIdList.get(writeTaskIdx),
-                    HashRange.of(readTaskIdx, readTaskIdx + 1),
-                    readerSideStore, partitionsPerBlock.get(writeTaskIdx).get(readTaskIdx).getData());
-              }
-              return true;
-            } catch (final Exception e) {
-              e.printStackTrace();
-              return false;
+      readFutureList.add(readExecutor.submit(new Callable<Boolean>() {
+        @Override
+        public Boolean call() {
+          try {
+            for (int writeTaskIdx = 0; writeTaskIdx < NUM_WRITE_VERTICES; writeTaskIdx++) {
+              readResultCheck(blockIdList.get(writeTaskIdx),
+                HashRange.of(readTaskIdx, readTaskIdx + 1),
+                readerSideStore, partitionsPerBlock.get(writeTaskIdx).get(readTaskIdx).getData());
             }
+            return true;
+          } catch (final Exception e) {
+            e.printStackTrace();
+            return false;
           }
-        })));
+        }
+      })));
 
     // Wait each reader to success
     IntStream.range(0, NUM_READ_VERTICES).forEach(reader -> {
@@ -378,9 +384,9 @@
     readExecutor.shutdown();
 
     System.out.println(
-        "Shuffle - write time in millis: " + (writeEndNano - startNano) / 1000000 +
-            ", Read time in millis: " + (readEndNano - writeEndNano) / 1000000 + " in store " +
-            writerSideStore.getClass().toString());
+      "Shuffle - write time in millis: " + (writeEndNano - startNano) / 1000000 +
+        ", Read time in millis: " + (readEndNano - writeEndNano) / 1000000 + " in store " +
+        writerSideStore.getClass().toString());
   }
 
   /**
@@ -412,7 +418,7 @@
           writerSideStore.writeBlock(block);
           blockManagerMaster.onProducerTaskScheduled(getTaskId(0), Collections.singleton(block.getId()));
           blockManagerMaster.onBlockStateChanged(
-              concBlockId, BlockState.State.AVAILABLE, "Writer side of the concurrent read edge");
+            concBlockId, BlockState.State.AVAILABLE, "Writer side of the concurrent read edge");
           return true;
         } catch (final Exception e) {
           e.printStackTrace();
@@ -431,18 +437,18 @@
 
     // Read the single block concurrently and check whether the result is equal to the input
     IntStream.range(0, NUM_CONC_READ_TASKS).forEach(readTaskIdx ->
-        readFutureList.add(readExecutor.submit(new Callable<Boolean>() {
-          @Override
-          public Boolean call() {
-            try {
-              readResultCheck(concBlockId, HashRange.all(), readerSideStore, concBlockPartition.getData());
-              return true;
-            } catch (final Exception e) {
-              e.printStackTrace();
-              return false;
-            }
+      readFutureList.add(readExecutor.submit(new Callable<Boolean>() {
+        @Override
+        public Boolean call() {
+          try {
+            readResultCheck(concBlockId, HashRange.all(), readerSideStore, concBlockPartition.getData());
+            return true;
+          } catch (final Exception e) {
+            e.printStackTrace();
+            return false;
           }
-        })));
+        }
+      })));
 
     // Wait each reader to success
     IntStream.range(0, NUM_CONC_READ_TASKS).forEach(reader -> {
@@ -464,9 +470,9 @@
     readExecutor.shutdown();
 
     System.out.println(
-        "Concurrent read - write time in millis: " + (writeEndNano - startNano) / 1000000 +
-            ", Read time in millis: " + (readEndNano - writeEndNano) / 1000000 + " in store " +
-            writerSideStore.getClass().toString());
+      "Concurrent read - write time in millis: " + (writeEndNano - startNano) / 1000000 +
+        ", Read time in millis: " + (readEndNano - writeEndNano) / 1000000 + " in store " +
+        writerSideStore.getClass().toString());
   }
 
   /**
@@ -474,7 +480,7 @@
    * Assumes following circumstances:
    * Task 1 (write (hash 0~3))->         (read (hash 0~1))-> Task 3
    * Task 2 (write (hash 0~3))-> shuffle (read (hash 2))-> Task 4
-   *                                     (read (hash 3))-> Task 5
+   * (read (hash 3))-> Task 5
    * It checks that each writer and reader does not throw any exception
    * and the read data is identical with written data (including the order).
    */
@@ -488,28 +494,28 @@
 
     // Write concurrently
     IntStream.range(0, NUM_WRITE_HASH_TASKS).forEach(writeTaskIdx ->
-        writeFutureList.add(writeExecutor.submit(new Callable<Boolean>() {
-          @Override
-          public Boolean call() {
-            try {
-              final String blockId = hashedBlockIdList.get(writeTaskIdx);
-              final Block block = writerSideStore.createBlock(blockId);
-              for (final NonSerializedPartition<Integer> partition : hashedBlockPartitionList.get(writeTaskIdx)) {
-                final Iterable data = partition.getData();
-                data.forEach(element -> block.write(partition.getKey(), element));
-              }
-              block.commit();
-              writerSideStore.writeBlock(block);
-              blockManagerMaster.onProducerTaskScheduled(getTaskId(writeTaskIdx), Collections.singleton(blockId));
-              blockManagerMaster.onBlockStateChanged(blockId, BlockState.State.AVAILABLE,
-                  "Writer side of the shuffle in hash range edge");
-              return true;
-            } catch (final Exception e) {
-              e.printStackTrace();
-              return false;
+      writeFutureList.add(writeExecutor.submit(new Callable<Boolean>() {
+        @Override
+        public Boolean call() {
+          try {
+            final String blockId = hashedBlockIdList.get(writeTaskIdx);
+            final Block block = writerSideStore.createBlock(blockId);
+            for (final NonSerializedPartition<Integer> partition : hashedBlockPartitionList.get(writeTaskIdx)) {
+              final Iterable data = partition.getData();
+              data.forEach(element -> block.write(partition.getKey(), element));
             }
+            block.commit();
+            writerSideStore.writeBlock(block);
+            blockManagerMaster.onProducerTaskScheduled(getTaskId(writeTaskIdx), Collections.singleton(blockId));
+            blockManagerMaster.onBlockStateChanged(blockId, BlockState.State.AVAILABLE,
+              "Writer side of the shuffle in hash range edge");
+            return true;
+          } catch (final Exception e) {
+            e.printStackTrace();
+            return false;
           }
-        })));
+        }
+      })));
 
     // Wait each writer to success
     IntStream.range(0, NUM_WRITE_HASH_TASKS).forEach(writer -> {
@@ -523,22 +529,22 @@
 
     // Read concurrently and check whether the result is equal to the expected data
     IntStream.range(0, NUM_READ_HASH_TASKS).forEach(readTaskIdx ->
-        readFutureList.add(readExecutor.submit(new Callable<Boolean>() {
-          @Override
-          public Boolean call() {
-            try {
-              for (int writeTaskIdx = 0; writeTaskIdx < NUM_WRITE_HASH_TASKS; writeTaskIdx++) {
-                final KeyRange<Integer> hashRangeToRetrieve = readKeyRangeList.get(readTaskIdx);
-                readResultCheck(hashedBlockIdList.get(writeTaskIdx), hashRangeToRetrieve,
-                    readerSideStore, expectedDataInRange.get(readTaskIdx).get(writeTaskIdx));
-              }
-              return true;
-            } catch (final Exception e) {
-              e.printStackTrace();
-              return false;
+      readFutureList.add(readExecutor.submit(new Callable<Boolean>() {
+        @Override
+        public Boolean call() {
+          try {
+            for (int writeTaskIdx = 0; writeTaskIdx < NUM_WRITE_HASH_TASKS; writeTaskIdx++) {
+              final KeyRange<Integer> hashRangeToRetrieve = readKeyRangeList.get(readTaskIdx);
+              readResultCheck(hashedBlockIdList.get(writeTaskIdx), hashRangeToRetrieve,
+                readerSideStore, expectedDataInRange.get(readTaskIdx).get(writeTaskIdx));
             }
+            return true;
+          } catch (final Exception e) {
+            e.printStackTrace();
+            return false;
           }
-        })));
+        }
+      })));
 
     // Wait each reader to success
     IntStream.range(0, NUM_READ_HASH_TASKS).forEach(reader -> {
@@ -556,7 +562,7 @@
       final boolean exist = writerSideStore.deleteBlock(hashedBlockIdList.get(writer));
       if (!exist) {
         throw new RuntimeException("The result of deleteBlock(" +
-            hashedBlockIdList.get(writer) + ") is false");
+          hashedBlockIdList.get(writer) + ") is false");
       }
     });
 
@@ -564,9 +570,9 @@
     readExecutor.shutdown();
 
     System.out.println(
-        "Shuffle in hash range - write time in millis: " + (writeEndNano - startNano) / 1000000 +
-            ", Read time in millis: " + (readEndNano - writeEndNano) / 1000000 + " in store " +
-            writerSideStore.getClass().toString());
+      "Shuffle in hash range - write time in millis: " + (writeEndNano - startNano) / 1000000 +
+        ", Read time in millis: " + (readEndNano - writeEndNano) / 1000000 + " in store " +
+        writerSideStore.getClass().toString());
   }
 
   private List getFixedKeyRangedNumList(final int key,
@@ -590,7 +596,7 @@
     }
     final Iterable<NonSerializedPartition> nonSerializedResult = optionalBlock.get().readPartitions(hashRange);
     final Iterable serToNonSerialized = DataUtil.convertToNonSerPartitions(
-        SERIALIZER, optionalBlock.get().readSerializedPartitions(hashRange));
+      SERIALIZER, optionalBlock.get().readSerializedPartitions(hashRange));
 
     assertEquals(expectedResult, DataUtil.concatNonSerPartitions(nonSerializedResult));
     assertEquals(expectedResult, DataUtil.concatNonSerPartitions(serToNonSerialized));
diff --git a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/data/BlockTest.java b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/data/BlockTest.java
index cf83696..02256f8 100644
--- a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/data/BlockTest.java
+++ b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/data/BlockTest.java
@@ -18,8 +18,8 @@
  */
 package org.apache.nemo.runtime.executor.data;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.nemo.common.HashRange;
-import org.apache.nemo.common.KeyRange;
 import org.apache.nemo.common.coder.IntDecoderFactory;
 import org.apache.nemo.common.coder.IntEncoderFactory;
 import org.apache.nemo.runtime.executor.data.block.Block;
@@ -29,7 +29,6 @@
 import org.apache.nemo.runtime.executor.data.metadata.LocalFileMetadata;
 import org.apache.nemo.runtime.executor.data.partition.NonSerializedPartition;
 import org.apache.nemo.runtime.executor.data.streamchainer.Serializer;
-import org.apache.commons.io.FileUtils;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
@@ -126,6 +125,7 @@
 
   /**
    * Compare the contents of a list and an iterable.
+   *
    * @param list     the list to test.
    * @param iterable the iterable to test.
    * @throws RuntimeException if the contents are not matched.
diff --git a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/data/BlockTransferThrottlerTest.java b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/data/BlockTransferThrottlerTest.java
index 082be5a..1249933 100644
--- a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/data/BlockTransferThrottlerTest.java
+++ b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/data/BlockTransferThrottlerTest.java
@@ -29,21 +29,24 @@
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
+
 import static org.junit.Assert.assertFalse;
 
 public final class BlockTransferThrottlerTest {
   private static final String THREAD_NAME = BlockTransferThrottler.class.getSimpleName() + "-TestThread";
   private static final String RUNTIME_EDGE_0 = "RuntimeEdge0";
   private static final int WAIT_TIME = 1000;
+
   /**
    * Creates {@link BlockTransferThrottler} for testing.
+   *
    * @param maxNum value for {@link JobConf.MaxNumDownloadsForARuntimeEdge} parameter.
    * @return {@link BlockTransferThrottler} object created.
    */
   private final BlockTransferThrottler getQueue(final int maxNum) {
     final Configuration conf = Tang.Factory.getTang().newConfigurationBuilder()
-        .bindNamedParameter(JobConf.MaxNumDownloadsForARuntimeEdge.class, String.valueOf(maxNum))
-        .build();
+      .bindNamedParameter(JobConf.MaxNumDownloadsForARuntimeEdge.class, String.valueOf(maxNum))
+      .build();
     final Injector injector = Tang.Factory.getTang().newInjector(conf);
     try {
       return injector.getInstance(BlockTransferThrottler.class);
@@ -55,7 +58,7 @@
   @Test(timeout = WAIT_TIME * 2)
   public void test() throws InterruptedException, ExecutionException {
     final ExecutorService executorService = Executors.newSingleThreadExecutor(
-        runnable -> new Thread(runnable, THREAD_NAME));
+      runnable -> new Thread(runnable, THREAD_NAME));
     final BlockTransferThrottler queue = getQueue(3);
     final Future executorServiceFuture = executorService.submit(() -> {
       try {
diff --git a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/datatransfer/DataTransferTest.java b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/datatransfer/DataTransferTest.java
index 88884eb..e5e01dd 100644
--- a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/datatransfer/DataTransferTest.java
+++ b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/datatransfer/DataTransferTest.java
@@ -18,21 +18,22 @@
  */
 package org.apache.nemo.runtime.executor.datatransfer;
 
-import org.apache.nemo.common.*;
+import org.apache.commons.io.FileUtils;
+import org.apache.nemo.common.Pair;
 import org.apache.nemo.common.coder.*;
+import org.apache.nemo.common.dag.DAG;
+import org.apache.nemo.common.dag.DAGBuilder;
 import org.apache.nemo.common.eventhandler.PubSubEventHandlerWrapper;
 import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.edge.executionproperty.*;
+import org.apache.nemo.common.ir.executionproperty.ExecutionPropertyMap;
 import org.apache.nemo.common.ir.executionproperty.VertexExecutionProperty;
-import org.apache.nemo.common.ir.vertex.SourceVertex;
 import org.apache.nemo.common.ir.vertex.IRVertex;
+import org.apache.nemo.common.ir.vertex.SourceVertex;
 import org.apache.nemo.common.ir.vertex.executionproperty.ParallelismProperty;
 import org.apache.nemo.common.ir.vertex.executionproperty.ScheduleGroupProperty;
 import org.apache.nemo.common.test.EmptyComponents;
 import org.apache.nemo.conf.JobConf;
-import org.apache.nemo.common.dag.DAG;
-import org.apache.nemo.common.dag.DAGBuilder;
-import org.apache.nemo.common.ir.executionproperty.ExecutionPropertyMap;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.message.MessageEnvironment;
 import org.apache.nemo.runtime.common.message.MessageParameters;
@@ -48,8 +49,9 @@
 import org.apache.nemo.runtime.executor.data.BlockManagerWorker;
 import org.apache.nemo.runtime.executor.data.DataUtil;
 import org.apache.nemo.runtime.executor.data.SerializerManager;
-import org.apache.nemo.runtime.master.*;
-import org.apache.commons.io.FileUtils;
+import org.apache.nemo.runtime.master.BlockManagerMaster;
+import org.apache.nemo.runtime.master.ClientRPC;
+import org.apache.nemo.runtime.master.RuntimeMaster;
 import org.apache.nemo.runtime.master.metric.MetricManagerMaster;
 import org.apache.nemo.runtime.master.metric.MetricMessageHandler;
 import org.apache.nemo.runtime.master.scheduler.BatchScheduler;
@@ -74,7 +76,6 @@
 import java.io.File;
 import java.io.IOException;
 import java.util.*;
-import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -84,41 +85,40 @@
 import java.util.stream.StreamSupport;
 
 import static org.apache.nemo.common.dag.DAG.EMPTY_DAG_DIRECTORY;
-import static org.apache.nemo.runtime.common.RuntimeTestUtil.getRangedNumList;
 import static org.apache.nemo.runtime.common.RuntimeTestUtil.flatten;
+import static org.apache.nemo.runtime.common.RuntimeTestUtil.getRangedNumList;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.mock;
-import static org.powermock.api.mockito.PowerMockito.when;
 
 /**
  * Tests {@link InputReader} and {@link OutputWriter}.
- *
+ * <p>
  * Execute {@code mvn test -Dtest=DataTransferTest -Dio.netty.leakDetectionLevel=paranoid}
  * to run the test with leakage reports for netty {@link io.netty.util.ReferenceCounted} objects.
  */
 @RunWith(PowerMockRunner.class)
 @PrepareForTest({PubSubEventHandlerWrapper.class, MetricMessageHandler.class,
-    SourceVertex.class, ClientRPC.class, MetricManagerMaster.class})
+  SourceVertex.class, ClientRPC.class, MetricManagerMaster.class})
 public final class DataTransferTest {
   private static final String EXECUTOR_ID_PREFIX = "Executor";
   private static final DataStoreProperty.Value MEMORY_STORE =
-      DataStoreProperty.Value.MemoryStore;
+    DataStoreProperty.Value.MemoryStore;
   private static final DataStoreProperty.Value SER_MEMORY_STORE =
-      DataStoreProperty.Value.SerializedMemoryStore;
+    DataStoreProperty.Value.SerializedMemoryStore;
   private static final DataStoreProperty.Value LOCAL_FILE_STORE =
-      DataStoreProperty.Value.LocalFileStore;
+    DataStoreProperty.Value.LocalFileStore;
   private static final DataStoreProperty.Value REMOTE_FILE_STORE =
-      DataStoreProperty.Value.GlusterFileStore;
+    DataStoreProperty.Value.GlusterFileStore;
   private static final String TMP_LOCAL_FILE_DIRECTORY = "./tmpLocalFiles";
   private static final String TMP_REMOTE_FILE_DIRECTORY = "./tmpRemoteFiles";
   private static final int PARALLELISM_TEN = 10;
   private static final String EDGE_PREFIX_TEMPLATE = "Dummy(%d)";
   private static final AtomicInteger TEST_INDEX = new AtomicInteger(0);
   private static final EncoderFactory ENCODER_FACTORY =
-      PairEncoderFactory.of(IntEncoderFactory.of(), IntEncoderFactory.of());
+    PairEncoderFactory.of(IntEncoderFactory.of(), IntEncoderFactory.of());
   private static final DecoderFactory DECODER_FACTORY =
-      PairDecoderFactory.of(IntDecoderFactory.of(), IntDecoderFactory.of());
+    PairDecoderFactory.of(IntDecoderFactory.of(), IntDecoderFactory.of());
   private static final Tang TANG = Tang.Factory.getTang();
 
   private BlockManagerMaster master;
@@ -130,13 +130,13 @@
   @Before
   public void setUp() throws InjectionException {
     final Configuration configuration = Tang.Factory.getTang().newConfigurationBuilder()
-        .bindNamedParameter(JobConf.ScheduleSerThread.class, "1")
-        .build();
+      .bindNamedParameter(JobConf.ScheduleSerThread.class, "1")
+      .build();
     final Injector baseInjector = Tang.Factory.getTang().newInjector(configuration);
     baseInjector.bindVolatileInstance(EvaluatorRequestor.class, mock(EvaluatorRequestor.class));
     final Injector dispatcherInjector = LocalMessageDispatcher.forkInjector(baseInjector);
     final Injector injector = LocalMessageEnvironment.forkInjector(dispatcherInjector,
-        MessageEnvironment.MASTER_COMMUNICATION_ID);
+      MessageEnvironment.MASTER_COMMUNICATION_ID);
 
     final PlanRewriter planRewriter = mock(PlanRewriter.class);
     injector.bindVolatileInstance(PlanRewriter.class, planRewriter);
@@ -159,11 +159,11 @@
 
     this.master = master;
     final Pair<BlockManagerWorker, IntermediateDataIOFactory> pair1 = createWorker(
-        EXECUTOR_ID_PREFIX + executorCount.getAndIncrement(), dispatcherInjector, nameClientInjector);
+      EXECUTOR_ID_PREFIX + executorCount.getAndIncrement(), dispatcherInjector, nameClientInjector);
     this.worker1 = pair1.left();
     this.transferFactory = pair1.right();
     this.worker2 = createWorker(EXECUTOR_ID_PREFIX + executorCount.getAndIncrement(), dispatcherInjector,
-        nameClientInjector).left();
+      nameClientInjector).left();
   }
 
   @After
@@ -173,17 +173,17 @@
   }
 
   private Pair<BlockManagerWorker, IntermediateDataIOFactory> createWorker(
-      final String executorId,
-      final Injector dispatcherInjector,
-      final Injector nameClientInjector) throws InjectionException {
+    final String executorId,
+    final Injector dispatcherInjector,
+    final Injector nameClientInjector) throws InjectionException {
     final Injector messageEnvironmentInjector = LocalMessageEnvironment.forkInjector(dispatcherInjector, executorId);
     final MessageEnvironment messageEnvironment = messageEnvironmentInjector.getInstance(MessageEnvironment.class);
     final PersistentConnectionToMasterMap conToMaster = messageEnvironmentInjector
-        .getInstance(PersistentConnectionToMasterMap.class);
+      .getInstance(PersistentConnectionToMasterMap.class);
     final Configuration executorConfiguration = TANG.newConfigurationBuilder()
-        .bindNamedParameter(JobConf.ExecutorId.class, executorId)
-        .bindNamedParameter(MessageParameters.SenderId.class, executorId)
-        .build();
+      .bindNamedParameter(JobConf.ExecutorId.class, executorId)
+      .bindNamedParameter(MessageParameters.SenderId.class, executorId)
+      .build();
     final Injector injector = nameClientInjector.forkInjector(executorConfiguration);
     injector.bindVolatileInstance(MessageEnvironment.class, messageEnvironment);
     injector.bindVolatileInstance(PersistentConnectionToMasterMap.class, conToMaster);
@@ -210,15 +210,15 @@
   private Injector createNameClientInjector() {
     try {
       final Configuration configuration = TANG.newConfigurationBuilder()
-          .bindImplementation(IdentifierFactory.class, StringIdentifierFactory.class)
-          .build();
+        .bindImplementation(IdentifierFactory.class, StringIdentifierFactory.class)
+        .build();
       final Injector injector = TANG.newInjector(configuration);
       final LocalAddressProvider localAddressProvider = injector.getInstance(LocalAddressProvider.class);
       final NameServer nameServer = injector.getInstance(NameServer.class);
       final Configuration nameClientConfiguration = NameResolverConfiguration.CONF
-          .set(NameResolverConfiguration.NAME_SERVER_HOSTNAME, localAddressProvider.getLocalAddress())
-          .set(NameResolverConfiguration.NAME_SERVICE_PORT, nameServer.getPort())
-          .build();
+        .set(NameResolverConfiguration.NAME_SERVER_HOSTNAME, localAddressProvider.getLocalAddress())
+        .set(NameResolverConfiguration.NAME_SERVICE_PORT, nameServer.getPort())
+        .build();
       return injector.forkInjector(nameClientConfiguration);
     } catch (final InjectionException e) {
       throw new RuntimeException(e);
@@ -346,7 +346,7 @@
     final List<List> dataReadList = new ArrayList<>();
     IntStream.range(0, PARALLELISM_TEN).forEach(dstTaskIndex -> {
       final InputReader reader =
-          new BlockInputReader(dstTaskIndex, srcVertex, dummyEdge, receiver);
+        new BlockInputReader(dstTaskIndex, srcVertex, dummyEdge, receiver);
 
       assertEquals(PARALLELISM_TEN, InputReader.getSourceParallelism(reader));
 
@@ -394,7 +394,7 @@
     dummyIREdge.setProperty(PartitionerProperty.of(PartitionerProperty.Type.Hash));
     dummyIREdge.setProperty(DuplicateEdgeGroupProperty.of(new DuplicateEdgeGroupPropertyValue("dummy")));
     final Optional<DuplicateEdgeGroupPropertyValue> duplicateDataProperty
-        = dummyIREdge.getPropertyValue(DuplicateEdgeGroupProperty.class);
+      = dummyIREdge.getPropertyValue(DuplicateEdgeGroupProperty.class);
     duplicateDataProperty.get().setRepresentativeEdgeId(edgeId);
     duplicateDataProperty.get().setGroupSize(2);
     dummyIREdge.setProperty(DataStoreProperty.of(store));
@@ -432,9 +432,9 @@
     final List<List> dataReadList2 = new ArrayList<>();
     IntStream.range(0, PARALLELISM_TEN).forEach(dstTaskIndex -> {
       final InputReader reader =
-          new BlockInputReader(dstTaskIndex, srcVertex, dummyEdge, receiver);
+        new BlockInputReader(dstTaskIndex, srcVertex, dummyEdge, receiver);
       final InputReader reader2 =
-          new BlockInputReader(dstTaskIndex, srcVertex, dummyEdge2, receiver);
+        new BlockInputReader(dstTaskIndex, srcVertex, dummyEdge2, receiver);
 
       assertEquals(PARALLELISM_TEN, InputReader.getSourceParallelism(reader));
 
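For reference, a minimal sketch of the indentation convention these hunks apply (class and member names are made up for illustration): class members are indented 2 spaces, method bodies a further 2, and wrapped continuation lines, including chained calls, now sit 2 spaces past the line they continue instead of the previous 4.

```java
// Hypothetical class illustrating the 2-space indent / 2-space continuation style.
public final class IndentExample {
  private static final String GREETING =
    "hello"; // continuation indented 2 spaces past the declaration

  public String greet(final String name) {
    final StringBuilder builder = new StringBuilder()
      .append(GREETING) // chained calls continue at +2 as well
      .append(", ")
      .append(name);
    return builder.toString();
  }
}
```
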
diff --git a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/datatransfer/InputWatermarkManagerTest.java b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/datatransfer/InputWatermarkManagerTest.java
index 5242d46..f37f843 100644
--- a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/datatransfer/InputWatermarkManagerTest.java
+++ b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/datatransfer/InputWatermarkManagerTest.java
@@ -17,18 +17,21 @@
  * under the License.
  */
 package org.apache.nemo.runtime.executor.datatransfer;
+
 import org.apache.nemo.common.ir.vertex.OperatorVertex;
 import org.apache.nemo.common.ir.vertex.transform.Transform;
 import org.apache.nemo.common.punctuation.Watermark;
 import org.junit.Test;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
+
 import java.util.LinkedList;
 import java.util.List;
+
+import static org.junit.Assert.assertEquals;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
-import static org.junit.Assert.assertEquals;
 
 public final class InputWatermarkManagerTest {
 
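The hunk above is the import-grouping rule in miniature. A hypothetical file showing the resulting layout: non-java imports first, alphabetized; a blank line; then java/javax imports; then static imports, also alphabetized.

```java
package org.example;

// Hypothetical sketch of the import layout the reformat enforces
// (assumes slf4j on the classpath, as in the modules above).
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;

import static java.util.Collections.emptyList;

public final class ImportOrderExample {
  private static final Logger LOG = LoggerFactory.getLogger(ImportOrderExample.class);

  public List<String> names() {
    LOG.debug("returning an empty list of names");
    return new ArrayList<>(emptyList());
  }
}
```
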
diff --git a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/task/ParentTaskDataFetcherTest.java b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/task/ParentTaskDataFetcherTest.java
index 6b94ca7..ae3b90f 100644
--- a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/task/ParentTaskDataFetcherTest.java
+++ b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/task/ParentTaskDataFetcherTest.java
@@ -24,7 +24,6 @@
 import org.apache.nemo.runtime.executor.data.DataUtil;
 import org.apache.nemo.runtime.executor.datatransfer.BlockInputReader;
 import org.apache.nemo.runtime.executor.datatransfer.InputReader;
-import org.apache.nemo.runtime.executor.datatransfer.InputWatermarkManager;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.Mockito;
@@ -32,7 +31,10 @@
 import org.powermock.modules.junit4.PowerMockRunner;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.Executors;
 
@@ -48,7 +50,7 @@
 @PrepareForTest({InputReader.class, VertexHarness.class, BlockInputReader.class})
 public final class ParentTaskDataFetcherTest {
 
-  @Test(timeout=5000)
+  @Test(timeout = 5000)
   public void testEmpty() throws Exception {
     final List<String> empty = new ArrayList<>(0); // empty data
     final InputReader inputReader = generateInputReader(generateCompletableFuture(empty.iterator()));
@@ -58,7 +60,7 @@
     assertEquals(Finishmark.getInstance(), fetcher.fetchDataElement());
   }
 
-  @Test(timeout=5000)
+  @Test(timeout = 5000)
   public void testNull() throws Exception {
     final List<String> oneNull = new ArrayList<>(1); // holds a single null element
     oneNull.add(null);
@@ -71,7 +73,7 @@
     assertEquals(null, fetcher.fetchDataElement());
   }
 
-  @Test(timeout=5000)
+  @Test(timeout = 5000)
   public void testNonEmpty() throws Exception {
     // InputReader
     final String singleData = "Single";
@@ -86,7 +88,7 @@
     assertEquals(singleData, fetcher.fetchDataElement());
   }
 
-  @Test(timeout=5000, expected = IOException.class)
+  @Test(timeout = 5000, expected = IOException.class)
   public void testErrorWhenRPC() throws Exception {
     // Failing future
     final CompletableFuture failingFuture = CompletableFuture.runAsync(() -> {
@@ -108,7 +110,7 @@
     assertTrue(failingFuture.isCompletedExceptionally());
   }
 
-  @Test(timeout=5000, expected = IOException.class)
+  @Test(timeout = 5000, expected = IOException.class)
   public void testErrorWhenReadingData() throws Exception {
     // Failed iterator
     final InputReader inputReader = generateInputReader(generateCompletableFuture(new FailedIterator()));
@@ -134,7 +136,7 @@
   }
 
   private CompletableFuture generateCompletableFuture(final Iterator iterator) {
-   return CompletableFuture.completedFuture(DataUtil.IteratorWithNumBytes.of(iterator));
+    return CompletableFuture.completedFuture(DataUtil.IteratorWithNumBytes.of(iterator));
   }
 
   private class FailedIterator implements Iterator {
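
Two smaller conventions show up in this file: annotation attributes get spaces around `=` (`@Test(timeout = 5000)`), and wildcard imports such as `java.util.*` are expanded to the explicit classes actually used. A hypothetical test showing both:

```java
// Hypothetical sketch of the explicit-import and annotation-spacing style
// (assumes JUnit 4 on the classpath, as in the test modules above).
import org.junit.Test;

import java.util.ArrayList;
import java.util.List;

import static org.junit.Assert.assertTrue;

public final class SpacingExample {

  @Test(timeout = 5000) // spaces around '=' in annotation attributes
  public void testEmptyList() {
    final List<String> list = new ArrayList<>();
    assertTrue(list.isEmpty());
  }
}
```

Note that the star-import threshold works in both directions: in TaskExecutorTest below, three explicit `org.junit.Assert` static imports are collapsed back into `Assert.*`.
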
diff --git a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/task/TaskExecutorTest.java b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/task/TaskExecutorTest.java
index 41dad41..89e1fcc 100644
--- a/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/task/TaskExecutorTest.java
+++ b/runtime/executor/src/test/java/org/apache/nemo/runtime/executor/task/TaskExecutorTest.java
@@ -19,36 +19,38 @@
 package org.apache.nemo.runtime.executor.task;
 
 import org.apache.nemo.common.Pair;
-import org.apache.nemo.common.ir.BoundedIteratorReadable;
-import org.apache.nemo.common.ir.OutputCollector;
 import org.apache.nemo.common.dag.DAG;
 import org.apache.nemo.common.dag.DAGBuilder;
+import org.apache.nemo.common.ir.BoundedIteratorReadable;
+import org.apache.nemo.common.ir.OutputCollector;
 import org.apache.nemo.common.ir.Readable;
 import org.apache.nemo.common.ir.edge.IREdge;
 import org.apache.nemo.common.ir.edge.executionproperty.AdditionalOutputTagProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
 import org.apache.nemo.common.ir.edge.executionproperty.DataStoreProperty;
 import org.apache.nemo.common.ir.executionproperty.EdgeExecutionProperty;
+import org.apache.nemo.common.ir.executionproperty.ExecutionPropertyMap;
 import org.apache.nemo.common.ir.executionproperty.VertexExecutionProperty;
+import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.ir.vertex.InMemorySourceVertex;
 import org.apache.nemo.common.ir.vertex.OperatorVertex;
-import org.apache.nemo.common.ir.vertex.executionproperty.ParallelismProperty;
 import org.apache.nemo.common.ir.vertex.SourceVertex;
+import org.apache.nemo.common.ir.vertex.executionproperty.ParallelismProperty;
 import org.apache.nemo.common.ir.vertex.transform.Transform;
-import org.apache.nemo.common.ir.executionproperty.ExecutionPropertyMap;
-import org.apache.nemo.common.ir.vertex.IRVertex;
 import org.apache.nemo.common.punctuation.Watermark;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.message.PersistentConnectionToMasterMap;
-import org.apache.nemo.runtime.common.plan.Stage;
-import org.apache.nemo.runtime.common.plan.Task;
-import org.apache.nemo.runtime.common.plan.StageEdge;
 import org.apache.nemo.runtime.common.plan.RuntimeEdge;
+import org.apache.nemo.runtime.common.plan.Stage;
+import org.apache.nemo.runtime.common.plan.StageEdge;
+import org.apache.nemo.runtime.common.plan.Task;
 import org.apache.nemo.runtime.executor.MetricMessageSender;
 import org.apache.nemo.runtime.executor.TaskStateManager;
 import org.apache.nemo.runtime.executor.data.BroadcastManagerWorker;
 import org.apache.nemo.runtime.executor.data.DataUtil;
-import org.apache.nemo.runtime.executor.datatransfer.*;
+import org.apache.nemo.runtime.executor.datatransfer.InputReader;
+import org.apache.nemo.runtime.executor.datatransfer.IntermediateDataIOFactory;
+import org.apache.nemo.runtime.executor.datatransfer.OutputWriter;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -68,9 +70,7 @@
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.Assert.*;
 import static org.mockito.ArgumentMatchers.anyInt;
 import static org.mockito.ArgumentMatchers.anyString;
 import static org.mockito.Matchers.any;
@@ -86,7 +86,7 @@
   private static final AtomicInteger RUNTIME_EDGE_ID = new AtomicInteger(0);
   private static final int DATA_SIZE = 100;
   private static final ExecutionPropertyMap<VertexExecutionProperty> TASK_EXECUTION_PROPERTY_MAP
-      = new ExecutionPropertyMap<>("TASK_EXECUTION_PROPERTY_MAP");
+    = new ExecutionPropertyMap<>("TASK_EXECUTION_PROPERTY_MAP");
   private static final int SOURCE_PARALLELISM = 5;
   private static final int FIRST_ATTEMPT = 0;
 
@@ -101,7 +101,7 @@
 
   private String generateTaskId() {
     return RuntimeIdManager.generateTaskId(
-        RuntimeIdManager.generateStageId(stageId.getAndIncrement()), 0, FIRST_ATTEMPT);
+      RuntimeIdManager.generateStageId(stageId.getAndIncrement()), 0, FIRST_ATTEMPT);
   }
 
   @Before
@@ -166,20 +166,20 @@
     vertexIdToReadable.put(sourceIRVertex.getId(), readable);
 
     final DAG<IRVertex, RuntimeEdge<IRVertex>> taskDag =
-        new DAGBuilder<IRVertex, RuntimeEdge<IRVertex>>()
-            .addVertex(sourceIRVertex)
-            .buildWithoutSourceSinkCheck();
+      new DAGBuilder<IRVertex, RuntimeEdge<IRVertex>>()
+        .addVertex(sourceIRVertex)
+        .buildWithoutSourceSinkCheck();
 
     final StageEdge taskOutEdge = mockStageEdgeFrom(sourceIRVertex);
     final Task task =
-        new Task(
-            "testSourceVertexDataFetching",
-            generateTaskId(),
-            TASK_EXECUTION_PROPERTY_MAP,
-            new byte[0],
-            Collections.emptyList(),
-            Collections.singletonList(taskOutEdge),
-            vertexIdToReadable);
+      new Task(
+        "testSourceVertexDataFetching",
+        generateTaskId(),
+        TASK_EXECUTION_PROPERTY_MAP,
+        new byte[0],
+        Collections.emptyList(),
+        Collections.singletonList(taskOutEdge),
+        vertexIdToReadable);
 
     // Execute the task.
     final TaskExecutor taskExecutor = getTaskExecutor(task, taskDag);
@@ -189,20 +189,19 @@
     assertTrue(checkEqualElements(elements, runtimeEdgeToOutputData.get(taskOutEdge.getId())));
   }
 
-    /**
+  /**
    * Test invalid parameter failure.
    */
   @Test()
   public void testInvalidInputData() throws Exception {
-    try{
+    try {
       // Execute the task.
       final TaskExecutor taskExecutor = getTaskExecutor(null, null);
       taskExecutor.execute();
 
       // This should not be reached.
       fail();
-    }
-    catch(NullPointerException e){
+    } catch (NullPointerException e) {
       assertEquals(true, true);
     }
   }
@@ -257,23 +256,23 @@
   /**
    * Test parent task data fetching.
    */
-  @Test(timeout=5000)
+  @Test(timeout = 5000)
   public void testParentTaskDataFetching() throws Exception {
     final IRVertex vertex = new OperatorVertex(new StreamTransform());
 
     final DAG<IRVertex, RuntimeEdge<IRVertex>> taskDag = new DAGBuilder<IRVertex, RuntimeEdge<IRVertex>>()
-        .addVertex(vertex)
-        .buildWithoutSourceSinkCheck();
+      .addVertex(vertex)
+      .buildWithoutSourceSinkCheck();
 
     final StageEdge taskOutEdge = mockStageEdgeFrom(vertex);
     final Task task = new Task(
-        "testSourceVertexDataFetching",
-        generateTaskId(),
-        TASK_EXECUTION_PROPERTY_MAP,
-        new byte[0],
-        Collections.singletonList(mockStageEdgeTo(vertex)),
-        Collections.singletonList(taskOutEdge),
-        Collections.emptyMap());
+      "testSourceVertexDataFetching",
+      generateTaskId(),
+      TASK_EXECUTION_PROPERTY_MAP,
+      new byte[0],
+      Collections.singletonList(mockStageEdgeTo(vertex)),
+      Collections.singletonList(taskOutEdge),
+      Collections.emptyMap());
 
     // Execute the task.
     final TaskExecutor taskExecutor = getTaskExecutor(task, taskDag);
@@ -297,13 +296,13 @@
   * The DAG of the task to test looks like:
    * source1 -> vertex1 -> vertex2
    * source2 -> vertex3 ->
-   *
+   * <p>
   * Vertex2 has two incoming edges (from vertex1 and vertex3),
   * and we test whether TaskExecutor handles data and watermarks correctly in this situation.
-   *
+   * <p>
    * source1 emits watermarks:     500 (ts)  600 (ts)   1400 (ts)  1800 (ts)        2500 (ts)
    * source2 emits watermarks:  1000(ts)                                     2200 (ts)
-   *
+   * <p>
   * Vertex2 should receive and emit watermarks 500, 600, 1000, 1800, and 2200.
    */
   @Test()
@@ -379,7 +378,7 @@
       new Watermark(1800), new Watermark(2200)), emittedWatermarks);
 
     // Check the output.
-    final List<Integer> doubledElements = new ArrayList<>(elements.size()*2);
+    final List<Integer> doubledElements = new ArrayList<>(elements.size() * 2);
     doubledElements.addAll(elements);
     doubledElements.addAll(elements);
     assertTrue(checkEqualElements(doubledElements, runtimeEdgeToOutputData.get(taskOutEdge.getId())));
@@ -388,32 +387,32 @@
   /**
   * The DAG of the task to test looks like:
    * parent task -> task (vertex 1 -> task 2) -> child task
-   *
+   * <p>
    * The output data from task 1 will be split according to source parallelism through {@link ParentTaskReaderAnswer}.
   * Because of this, task 1 will process multiple partitions and emit data multiple times as well.
    * On the other hand, task 2 will receive the output data once and produce a single output.
    */
-  @Test(timeout=5000)
+  @Test(timeout = 5000)
   public void testTwoOperators() throws Exception {
     final IRVertex operatorIRVertex1 = new OperatorVertex(new StreamTransform());
     final IRVertex operatorIRVertex2 = new OperatorVertex(new StreamTransform());
 
     final String edgeId = "edge";
     final DAG<IRVertex, RuntimeEdge<IRVertex>> taskDag = new DAGBuilder<IRVertex, RuntimeEdge<IRVertex>>()
-        .addVertex(operatorIRVertex1)
-        .addVertex(operatorIRVertex2)
-        .connectVertices(createEdge(operatorIRVertex1, operatorIRVertex2, edgeId))
-        .buildWithoutSourceSinkCheck();
+      .addVertex(operatorIRVertex1)
+      .addVertex(operatorIRVertex2)
+      .connectVertices(createEdge(operatorIRVertex1, operatorIRVertex2, edgeId))
+      .buildWithoutSourceSinkCheck();
 
     final StageEdge taskOutEdge = mockStageEdgeFrom(operatorIRVertex2);
     final Task task = new Task(
-        "testSourceVertexDataFetching",
-        generateTaskId(),
-        TASK_EXECUTION_PROPERTY_MAP,
-        new byte[0],
-        Collections.singletonList(mockStageEdgeTo(operatorIRVertex1)),
-        Collections.singletonList(taskOutEdge),
-        Collections.emptyMap());
+      "testSourceVertexDataFetching",
+      generateTaskId(),
+      TASK_EXECUTION_PROPERTY_MAP,
+      new byte[0],
+      Collections.singletonList(mockStageEdgeTo(operatorIRVertex1)),
+      Collections.singletonList(taskOutEdge),
+      Collections.emptyMap());
 
     // Execute the task.
     final TaskExecutor taskExecutor = getTaskExecutor(task, taskDag);
@@ -423,7 +422,7 @@
     assertTrue(checkEqualElements(elements, runtimeEdgeToOutputData.get(taskOutEdge.getId())));
   }
 
-  @Test(timeout=5000)
+  @Test(timeout = 5000)
   public void testTwoOperatorsWithBroadcastVariable() {
     final Transform singleListTransform = new CreateSingleListTransform();
 
@@ -433,10 +432,10 @@
 
     final String edgeId = "edge";
     final DAG<IRVertex, RuntimeEdge<IRVertex>> taskDag = new DAGBuilder<IRVertex, RuntimeEdge<IRVertex>>()
-        .addVertex(operatorIRVertex1)
-        .addVertex(operatorIRVertex2)
-        .connectVertices(createEdge(operatorIRVertex1, operatorIRVertex2, edgeId))
-        .buildWithoutSourceSinkCheck();
+      .addVertex(operatorIRVertex1)
+      .addVertex(operatorIRVertex2)
+      .connectVertices(createEdge(operatorIRVertex1, operatorIRVertex2, edgeId))
+      .buildWithoutSourceSinkCheck();
 
     final StageEdge taskOutEdge = mockStageEdgeFrom(operatorIRVertex2);
     final StageEdge taskInEdge = mockStageEdgeTo(operatorIRVertex1);
@@ -444,13 +443,13 @@
     when(broadcastManagerWorker.get(broadcastId)).thenReturn(new ArrayList<>(elements));
 
     final Task task = new Task(
-        "testSourceVertexDataFetching",
-        generateTaskId(),
-        TASK_EXECUTION_PROPERTY_MAP,
-        new byte[0],
-        Collections.singletonList(taskInEdge),
-        Collections.singletonList(taskOutEdge),
-        Collections.emptyMap());
+      "testSourceVertexDataFetching",
+      generateTaskId(),
+      TASK_EXECUTION_PROPERTY_MAP,
+      new byte[0],
+      Collections.singletonList(taskInEdge),
+      Collections.singletonList(taskOutEdge),
+      Collections.emptyMap());
 
     // Execute the task.
     final TaskExecutor taskExecutor = getTaskExecutor(task, taskDag);
@@ -466,20 +465,20 @@
   /**
    * The DAG of the task to test looks like:
    * parent vertex 1 --+-- vertex 2 (main tag)
-   *                   +-- vertex 3 (additional tag 1)
-   *                   +-- vertex 4 (additional tag 2)
-   *
+   * +-- vertex 3 (additional tag 1)
+   * +-- vertex 4 (additional tag 2)
+   * <p>
   * emit(element) and emit(dstVertexId, element) are used together. emit(element) routes results to main output children,
    * and emit(dstVertexId, element) routes results to corresponding additional output children.
    */
-  @Test(timeout=5000)
+  @Test(timeout = 5000)
   public void testAdditionalOutputs() throws Exception {
     final String additionalTag1 = "bonus1";
     final String additionalTag2 = "bonus2";
 
     final IRVertex routerVertex = new OperatorVertex(
       new RoutingTransform(Arrays.asList(additionalTag1, additionalTag2)));
-    final IRVertex mainVertex= new OperatorVertex(new StreamTransform());
+    final IRVertex mainVertex = new OperatorVertex(new StreamTransform());
     final IRVertex bonusVertex1 = new OperatorVertex(new StreamTransform());
     final IRVertex bonusVertex2 = new OperatorVertex(new StreamTransform());
 
@@ -491,27 +490,27 @@
     edge3.getExecutionProperties().put(AdditionalOutputTagProperty.of(additionalTag2));
 
     final DAG<IRVertex, RuntimeEdge<IRVertex>> taskDag = new DAGBuilder<IRVertex, RuntimeEdge<IRVertex>>()
-        .addVertex(routerVertex)
-        .addVertex(mainVertex)
-        .addVertex(bonusVertex1)
-        .addVertex(bonusVertex2)
-        .connectVertices(edge1)
-        .connectVertices(edge2)
-        .connectVertices(edge3)
-        .buildWithoutSourceSinkCheck();
+      .addVertex(routerVertex)
+      .addVertex(mainVertex)
+      .addVertex(bonusVertex1)
+      .addVertex(bonusVertex2)
+      .connectVertices(edge1)
+      .connectVertices(edge2)
+      .connectVertices(edge3)
+      .buildWithoutSourceSinkCheck();
 
     final StageEdge outEdge1 = mockStageEdgeFrom(mainVertex);
     final StageEdge outEdge2 = mockStageEdgeFrom(bonusVertex1);
     final StageEdge outEdge3 = mockStageEdgeFrom(bonusVertex2);
 
     final Task task = new Task(
-        "testAdditionalOutputs",
-        generateTaskId(),
-        TASK_EXECUTION_PROPERTY_MAP,
-        new byte[0],
-        Collections.singletonList(mockStageEdgeTo(routerVertex)),
-        Arrays.asList(outEdge1, outEdge2, outEdge3),
-        Collections.emptyMap());
+      "testAdditionalOutputs",
+      generateTaskId(),
+      TASK_EXECUTION_PROPERTY_MAP,
+      new byte[0],
+      Collections.singletonList(mockStageEdgeTo(routerVertex)),
+      Arrays.asList(outEdge1, outEdge2, outEdge3),
+      Collections.emptyMap());
 
     // Execute the task.
     final TaskExecutor taskExecutor = getTaskExecutor(task, taskDag);
@@ -540,11 +539,11 @@
 
   private StageEdge mockStageEdgeFrom(final IRVertex irVertex) {
     return new StageEdge("SEdge" + RUNTIME_EDGE_ID.getAndIncrement(),
-        ExecutionPropertyMap.of(mock(IREdge.class), CommunicationPatternProperty.Value.OneToOne),
-        irVertex,
-        new OperatorVertex(new StreamTransform()),
-        mock(Stage.class),
-        mock(Stage.class));
+      ExecutionPropertyMap.of(mock(IREdge.class), CommunicationPatternProperty.Value.OneToOne),
+      irVertex,
+      new OperatorVertex(new StreamTransform()),
+      mock(Stage.class),
+      mock(Stage.class));
   }
 
   private StageEdge mockStageEdgeTo(final IRVertex irVertex) {
@@ -569,8 +568,8 @@
       final int elementsPerSource = DATA_SIZE / SOURCE_PARALLELISM;
       for (int i = 0; i < SOURCE_PARALLELISM; i++) {
         inputFutures.add(CompletableFuture.completedFuture(
-            DataUtil.IteratorWithNumBytes.of(elements.subList(i * elementsPerSource, (i + 1) * elementsPerSource)
-                .iterator())));
+          DataUtil.IteratorWithNumBytes.of(elements.subList(i * elementsPerSource, (i + 1) * elementsPerSource)
+            .iterator())));
       }
       final InputReader inputReader = mock(InputReader.class);
       final IRVertex srcVertex = (IRVertex) invocationOnMock.getArgument(1);
@@ -608,6 +607,7 @@
   /**
   * This transform does not emit watermarks to OutputWriter
    * because OutputWriter currently does not support watermarks (TODO #245)
+   *
    * @param <T> type
    */
   private class StreamTransformNoWatermarkEmit<T> implements Transform<T, T> {
@@ -734,6 +734,7 @@
 
   /**
    * Simple identity function for testing.
+   *
    * @param <T> input/output type.
    */
   private class StreamTransform<T> implements Transform<T, T> {
@@ -762,6 +763,7 @@
 
   /**
    * Creates a view.
+   *
    * @param <T> input type.
    */
   private class CreateSingleListTransform<T> implements Transform<T, List<T>> {
@@ -792,6 +794,7 @@
 
   /**
    * Pairs data element with a broadcast variable.
+   *
    * @param <T> input/output type.
    */
   private class BroadcastVariablePairingTransform<T> implements Transform<T, T> {
@@ -867,6 +870,7 @@
 
   /**
    * Gets a list of integer pair elements in range.
+   *
    * @param start value of the range (inclusive).
    * @param end   value of the range (exclusive).
    * @return the list of elements.
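
The watermark scenario documented in TaskExecutorTest above follows the usual min-watermark rule: an operator with several inputs tracks the latest watermark per input edge and forwards the minimum whenever it advances. A minimal sketch of that invariant (this is not Nemo's actual InputWatermarkManager, just the rule the test asserts):

```java
import java.util.Arrays;

// Minimal sketch of min-watermark tracking across multiple inputs.
// Feeding it the two sources' watermarks in timestamp order yields
// exactly the sequence 500, 600, 1000, 1800, 2200 described above.
final class MinWatermarkTracker {
  private final long[] latestPerInput;
  private long lastEmitted = Long.MIN_VALUE;

  MinWatermarkTracker(final int numInputs) {
    this.latestPerInput = new long[numInputs];
    Arrays.fill(latestPerInput, Long.MIN_VALUE);
  }

  /** Returns the new output watermark, or -1 if nothing should be emitted. */
  long onWatermark(final int inputIndex, final long timestamp) {
    latestPerInput[inputIndex] = Math.max(latestPerInput[inputIndex], timestamp);
    final long min = Arrays.stream(latestPerInput).min().getAsLong();
    if (min > lastEmitted) {
      lastEmitted = min;
      return min; // the minimum advanced; forward it downstream
    }
    return -1;    // the minimum did not advance; emit nothing
  }
}
```
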
diff --git a/runtime/master/pom.xml b/runtime/master/pom.xml
index 0a5058a..af59787 100644
--- a/runtime/master/pom.xml
+++ b/runtime/master/pom.xml
@@ -17,79 +17,80 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-runtime</artifactId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../</relativePath>
-    </parent>
+  <parent>
+    <groupId>org.apache.nemo</groupId>
+    <artifactId>nemo-runtime</artifactId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../</relativePath>
+  </parent>
 
-    <artifactId>nemo-runtime-master</artifactId>
-    <name>Nemo Runtime Master</name>
+  <artifactId>nemo-runtime-master</artifactId>
+  <name>Nemo Runtime Master</name>
 
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-runtime-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-runtime-test</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.eclipse.jetty</groupId>
-            <artifactId>jetty-server</artifactId>
-            <version>${jetty-server.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.eclipse.jetty</groupId>
-            <artifactId>jetty-servlet</artifactId>
-            <version>${jetty-servlet.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.eclipse.jetty.websocket</groupId>
-            <artifactId>websocket-api</artifactId>
-            <version>${jetty-servlet.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.eclipse.jetty.websocket</groupId>
-            <artifactId>websocket-server</artifactId>
-            <version>${jetty-servlet.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-core</artifactId>
-            <version>${jackson.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-            <version>${jackson.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.xerial</groupId>
-            <artifactId>sqlite-jdbc</artifactId>
-            <version>${sqlite-jdbc.version}</version>
-        </dependency>
-        <dependency>
-            <!--
-            This is needed to view the logs when running unit tests.
-            See https://dzone.com/articles/how-configure-slf4j-different for details.
-            -->
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-simple</artifactId>
-            <version>1.6.2</version>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-runtime-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-runtime-test</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-server</artifactId>
+      <version>${jetty-server.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-servlet</artifactId>
+      <version>${jetty-servlet.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty.websocket</groupId>
+      <artifactId>websocket-api</artifactId>
+      <version>${jetty-servlet.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty.websocket</groupId>
+      <artifactId>websocket-server</artifactId>
+      <version>${jetty-servlet.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-core</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.xerial</groupId>
+      <artifactId>sqlite-jdbc</artifactId>
+      <version>${sqlite-jdbc.version}</version>
+    </dependency>
+    <dependency>
+      <!--
+      This is needed to view the logs when running unit tests.
+      See https://dzone.com/articles/how-configure-slf4j-different for details.
+      -->
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-simple</artifactId>
+      <version>1.6.2</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
 </project>
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/BlockManagerMaster.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/BlockManagerMaster.java
index 1c768e1..410ae71 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/BlockManagerMaster.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/BlockManagerMaster.java
@@ -18,17 +18,19 @@
  */
 package org.apache.nemo.runtime.master;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.nemo.common.exception.IllegalMessageException;
 import org.apache.nemo.common.exception.UnknownExecutionStateException;
+import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.runtime.common.exception.AbsentBlockException;
-import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.message.MessageContext;
 import org.apache.nemo.runtime.common.message.MessageEnvironment;
 import org.apache.nemo.runtime.common.message.MessageListener;
 import org.apache.nemo.runtime.common.state.BlockState;
-
-import com.google.common.annotations.VisibleForTesting;
+import org.apache.reef.annotations.audience.DriverSide;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.annotation.Nullable;
 import javax.annotation.concurrent.ThreadSafe;
@@ -43,10 +45,6 @@
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.stream.Collectors;
 
-import org.apache.reef.annotations.audience.DriverSide;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * Master-side block manager.
  * This implementation assumes that only a single user application can submit (maybe multiple) plans through
@@ -75,6 +73,7 @@
 
   /**
    * Constructor.
+   *
    * @param masterMessageEnvironment the message environment.
    */
   @Inject
@@ -140,8 +139,9 @@
 
   /**
    * Get handlers of blocks that are in a particular state.
+   *
    * @param blockIdOrWildcard to query
-   * @param state of the block
+   * @param state             of the block
    * @return the handlers, empty if none matches.
    */
   public List<BlockRequestHandler> getBlockHandlers(final String blockIdOrWildcard,
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/BlockMetadata.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/BlockMetadata.java
index c04e6d9..11353a3 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/BlockMetadata.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/BlockMetadata.java
@@ -20,8 +20,8 @@
 
 import org.apache.nemo.common.StateMachine;
 import org.apache.nemo.common.exception.IllegalStateTransitionException;
-import org.apache.nemo.runtime.common.state.BlockState;
 import org.apache.nemo.runtime.common.exception.AbsentBlockException;
+import org.apache.nemo.runtime.common.state.BlockState;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/ClientRPC.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/ClientRPC.java
index 213f9a1..3405ca9 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/ClientRPC.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/ClientRPC.java
@@ -49,7 +49,7 @@
   private static final int RETRY_TIMEOUT = 100;
 
   private final Map<ControlMessage.ClientToDriverMessageType, EventHandler<ControlMessage.ClientToDriverMessage>>
-      handlers = new ConcurrentHashMap<>();
+    handlers = new ConcurrentHashMap<>();
   private final Transport transport;
   private final Link<ControlMessage.DriverToClientMessage> link;
   private volatile boolean isClosed = false;
@@ -60,7 +60,7 @@
                     @Parameter(JobConf.ClientSideRPCServerHost.class) final String clientHost,
                     @Parameter(JobConf.ClientSideRPCServerPort.class) final int clientPort) {
     transport = transportFactory.newInstance(localAddressProvider.getLocalAddress(),
-        0, new SyncStage<>(new RPCEventHandler()), null, RETRY_COUNT, RETRY_TIMEOUT);
+      0, new SyncStage<>(new RPCEventHandler()), null, RETRY_COUNT, RETRY_TIMEOUT);
     final SocketAddress clientAddress = new InetSocketAddress(clientHost, clientPort);
     try {
       link = transport.open(clientAddress, ENCODER, LINK_LISTENER);
@@ -72,7 +72,8 @@
 
   /**
    * Registers handler for the given type of message.
-   * @param type the type of message
+   *
+   * @param type    the type of message
    * @param handler handler implementation
    * @return {@code this}
    */
@@ -100,6 +101,7 @@
 
   /**
    * Write message to client.
+   *
    * @param message message to send.
    */
   public void send(final ControlMessage.DriverToClientMessage message) {
@@ -109,6 +111,7 @@
 
   /**
    * Handles message from client.
+   *
    * @param message message to process
    */
   private void handleMessage(final ControlMessage.ClientToDriverMessage message) {
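
The `handlers` field above is a common pattern: a concurrent map from message type to handler, looked up on receipt. A self-contained sketch of the idea, with a generic enum key and `Consumer` standing in for the REEF `EventHandler` (names hypothetical):

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;

// Hypothetical sketch of the type-keyed handler registry ClientRPC uses.
final class HandlerRegistry<T extends Enum<T>, M> {
  private final Map<T, Consumer<M>> handlers = new ConcurrentHashMap<>();

  /** Registers a handler for one message type; returns this for chaining. */
  HandlerRegistry<T, M> registerHandler(final T type, final Consumer<M> handler) {
    if (handlers.putIfAbsent(type, handler) != null) {
      throw new IllegalStateException("Handler already registered for " + type);
    }
    return this;
  }

  /** Dispatches a message to the handler registered for its type. */
  void dispatch(final T type, final M message) {
    final Consumer<M> handler = handlers.get(type);
    if (handler == null) {
      throw new IllegalArgumentException("No handler for message type " + type);
    }
    handler.accept(message);
  }
}
```
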
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/PipeManagerMaster.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/PipeManagerMaster.java
index f2c5d18..6721d97 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/PipeManagerMaster.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/PipeManagerMaster.java
@@ -53,6 +53,7 @@
 
   /**
    * Constructor.
+   *
    * @param masterMessageEnvironment the message environment.
    */
   @Inject
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/PlanStateManager.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/PlanStateManager.java
index 702e9ea..635df72 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/PlanStateManager.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/PlanStateManager.java
@@ -19,17 +19,28 @@
 package org.apache.nemo.runtime.master;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.apache.nemo.common.StateMachine;
 import org.apache.nemo.common.exception.IllegalStateTransitionException;
 import org.apache.nemo.common.exception.UnknownExecutionStateException;
-import org.apache.nemo.common.StateMachine;
 import org.apache.nemo.common.ir.vertex.executionproperty.ClonedSchedulingProperty;
 import org.apache.nemo.conf.JobConf;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
+import org.apache.nemo.runtime.common.metric.JobMetric;
+import org.apache.nemo.runtime.common.metric.StageMetric;
+import org.apache.nemo.runtime.common.metric.TaskMetric;
 import org.apache.nemo.runtime.common.plan.PhysicalPlan;
 import org.apache.nemo.runtime.common.plan.Stage;
 import org.apache.nemo.runtime.common.state.PlanState;
 import org.apache.nemo.runtime.common.state.StageState;
+import org.apache.nemo.runtime.common.state.TaskState;
+import org.apache.nemo.runtime.master.metric.MetricStore;
+import org.apache.reef.annotations.audience.DriverSide;
+import org.apache.reef.tang.annotations.Parameter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
+import javax.annotation.concurrent.ThreadSafe;
+import javax.inject.Inject;
 import java.io.File;
 import java.io.IOException;
 import java.io.PrintWriter;
@@ -40,26 +51,13 @@
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.stream.Collectors;
 
-import org.apache.nemo.runtime.common.state.TaskState;
-import org.apache.nemo.runtime.common.metric.JobMetric;
-import org.apache.nemo.runtime.common.metric.StageMetric;
-import org.apache.nemo.runtime.common.metric.TaskMetric;
-import org.apache.nemo.runtime.master.metric.MetricStore;
-import org.apache.reef.annotations.audience.DriverSide;
-import org.apache.reef.tang.annotations.Parameter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.annotation.concurrent.ThreadSafe;
-import javax.inject.Inject;
-
 import static org.apache.nemo.common.dag.DAG.EMPTY_DAG_DIRECTORY;
 
 /**
  * Maintains three levels of state machines (PlanState, StageState, and TaskState) of a physical plan.
  * The main API this class provides is onTaskStateReportFromExecutor(), which directly changes a TaskState.
  * PlanState and StageState are updated internally in the class, and can only be read from the outside.
- *
+ * <p>
  * (CONCURRENCY) The public methods of this class are synchronized.
  */
 @DriverSide
@@ -259,6 +257,7 @@
 
   /**
    * List of task times so far for this stage.
+   *
    * @param stageId of the stage.
    * @return a copy of the list, empty if none completed.
    */
@@ -268,8 +267,8 @@
   }
 
   /**
-   * @param stageId of the clone.
-   * @param taskIndex of the clone.
+   * @param stageId     of the clone.
+   * @param taskIndex   of the clone.
    * @param numOfClones of the clone.
    * @return true if the numOfClones has been modified, false otherwise
    */
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/RuntimeMaster.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/RuntimeMaster.java
index 119f23e..c8d165b 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/RuntimeMaster.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/RuntimeMaster.java
@@ -18,6 +18,8 @@
  */
 package org.apache.nemo.runtime.master;
 
+import com.fasterxml.jackson.core.TreeNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.protobuf.ByteString;
 import org.apache.commons.lang3.SerializationUtils;
 import org.apache.nemo.common.Pair;
@@ -36,12 +38,12 @@
 import org.apache.nemo.runtime.master.metric.MetricManagerMaster;
 import org.apache.nemo.runtime.master.metric.MetricMessageHandler;
 import org.apache.nemo.runtime.master.metric.MetricStore;
-import org.apache.nemo.runtime.master.scheduler.BatchScheduler;
-import org.apache.nemo.runtime.master.servlet.*;
 import org.apache.nemo.runtime.master.resource.ContainerManager;
 import org.apache.nemo.runtime.master.resource.ExecutorRepresenter;
 import org.apache.nemo.runtime.master.resource.ResourceSpecification;
+import org.apache.nemo.runtime.master.scheduler.BatchScheduler;
 import org.apache.nemo.runtime.master.scheduler.Scheduler;
+import org.apache.nemo.runtime.master.servlet.*;
 import org.apache.reef.annotations.audience.DriverSide;
 import org.apache.reef.driver.context.ActiveContext;
 import org.apache.reef.driver.evaluator.AllocatedEvaluator;
@@ -53,13 +55,13 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.fasterxml.jackson.core.TreeNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
 import javax.inject.Inject;
-import java.nio.file.Paths;
 import java.io.Serializable;
-import java.util.*;
+import java.nio.file.Paths;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
 import java.util.concurrent.*;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -69,15 +71,15 @@
 /**
  * (WARNING) Use runtimeMasterThread for all public methods to avoid race conditions.
  * See comments in the {@link Scheduler} for avoiding race conditions.
- *
+ * <p>
  * Runtime Master is the central controller of Runtime.
  * Compiler submits an {@link PhysicalPlan} to Runtime Master to execute a job.
  * Runtime Master handles:
- *    a) Scheduling the plan with {@link Scheduler}.
- *    b) Managing resources with {@link ContainerManager}.
- *    c) Managing blocks with {@link BlockManagerMaster}.
- *    d) Receiving and sending control messages with {@link MessageEnvironment}.
- *    e) Metric using {@link MetricMessageHandler}.
+ * a) Scheduling the plan with {@link Scheduler}.
+ * b) Managing resources with {@link ContainerManager}.
+ * c) Managing blocks with {@link BlockManagerMaster}.
+ * d) Receiving and sending control messages with {@link MessageEnvironment}.
+ * e) Metric using {@link MetricMessageHandler}.
  */
 @DriverSide
 public final class RuntimeMaster {
@@ -129,7 +131,7 @@
     // compared to the job completion times of executed jobs
     // and keeping it single threaded removes the complexity of multi-thread synchronization.
     this.runtimeMasterThread =
-        Executors.newSingleThreadExecutor(runnable -> new Thread(runnable, "RuntimeMaster thread"));
+      Executors.newSingleThreadExecutor(runnable -> new Thread(runnable, "RuntimeMaster thread"));
 
     // Check for speculative execution every second.
     this.speculativeTaskCloningThread = Executors
@@ -145,7 +147,7 @@
     this.metricMessageHandler = metricMessageHandler;
     this.masterMessageEnvironment = masterMessageEnvironment;
     this.masterMessageEnvironment
-        .setupListener(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID, new MasterControlMessageReceiver());
+      .setupListener(MessageEnvironment.RUNTIME_MASTER_MESSAGE_LISTENER_ID, new MasterControlMessageReceiver());
     this.clientRPC = clientRPC;
     this.metricManagerMaster = metricManagerMaster;
     this.jobId = jobId;
@@ -163,6 +165,7 @@
 
   /**
    * Start Metric Server.
+   *
    * @return the metric server.
    */
   private Server startRestMetricServer() {
@@ -395,7 +398,7 @@
           break;
         default:
           throw new IllegalMessageException(
-              new Exception("This message should not be requested to Master :" + message.getType()));
+            new Exception("This message should not be requested to Master :" + message.getType()));
       }
     }
   }
@@ -404,14 +407,14 @@
     switch (message.getType()) {
       case TaskStateChanged:
         final ControlMessage.TaskStateChangedMsg taskStateChangedMsg
-            = message.getTaskStateChangedMsg();
+          = message.getTaskStateChangedMsg();
 
         scheduler.onTaskStateReportFromExecutor(taskStateChangedMsg.getExecutorId(),
-            taskStateChangedMsg.getTaskId(),
-            taskStateChangedMsg.getAttemptIdx(),
-            convertTaskState(taskStateChangedMsg.getState()),
-            taskStateChangedMsg.getVertexPutOnHoldId(),
-            convertFailureCause(taskStateChangedMsg.getFailureCause()));
+          taskStateChangedMsg.getTaskId(),
+          taskStateChangedMsg.getAttemptIdx(),
+          convertTaskState(taskStateChangedMsg.getState()),
+          taskStateChangedMsg.getVertexPutOnHoldId(),
+          convertFailureCause(taskStateChangedMsg.getFailureCause()));
         break;
       case ExecutorFailed:
         // Executor failed due to user code.
@@ -428,23 +431,23 @@
       case MetricMessageReceived:
         final List<ControlMessage.Metric> metricList = message.getMetricMsg().getMetricList();
         metricList.forEach(metric ->
-            metricMessageHandler.onMetricMessageReceived(
-                metric.getMetricType(), metric.getMetricId(),
-                metric.getMetricField(), metric.getMetricValue().toByteArray()));
+          metricMessageHandler.onMetricMessageReceived(
+            metric.getMetricType(), metric.getMetricId(),
+            metric.getMetricField(), metric.getMetricValue().toByteArray()));
         break;
       case ExecutorDataCollected:
         final String serializedData = message.getDataCollected().getData();
         clientRPC.send(ControlMessage.DriverToClientMessage.newBuilder()
-            .setType(ControlMessage.DriverToClientMessageType.DataCollected)
-            .setDataCollected(ControlMessage.DataCollectMessage.newBuilder().setData(serializedData).build())
-            .build());
+          .setType(ControlMessage.DriverToClientMessageType.DataCollected)
+          .setDataCollected(ControlMessage.DataCollectMessage.newBuilder().setData(serializedData).build())
+          .build());
         break;
       case MetricFlushed:
         metricCountDownLatch.countDown();
         break;
       default:
         throw new IllegalMessageException(
-            new Exception("This message should not be received by Master :" + message.getType()));
+          new Exception("This message should not be received by Master :" + message.getType()));
     }
   }
 
@@ -468,7 +471,7 @@
   }
 
   private TaskState.RecoverableTaskFailureCause convertFailureCause(
-      final ControlMessage.RecoverableFailureCause cause) {
+    final ControlMessage.RecoverableFailureCause cause) {
     switch (cause) {
       case InputReadFailure:
         return TaskState.RecoverableTaskFailureCause.INPUT_READ_FAILURE;
@@ -476,7 +479,7 @@
         return TaskState.RecoverableTaskFailureCause.OUTPUT_WRITE_FAILURE;
       default:
         throw new UnknownFailureCauseException(
-            new Throwable("The failure cause for the recoverable failure is unknown"));
+          new Throwable("The failure cause for the recoverable failure is unknown"));
     }
   }
 
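One side effect visible in several hunks (the a) to e) list here, the vertex diagram in TaskExecutorTest): 'Reformat code' rewrites blank javadoc lines as `<p>` and strips leading alignment inside javadoc. Where the alignment carries meaning, wrapping the block in `<pre>` should keep the formatter away from it; a hypothetical example:

```java
/**
 * Hypothetical example: wrapping ASCII layout in pre tags so that
 * javadoc reformatting leaves the alignment intact.
 * <pre>
 * parent vertex 1 --+-- vertex 2 (main tag)
 *                   +-- vertex 3 (additional tag 1)
 *                   +-- vertex 4 (additional tag 2)
 * </pre>
 */
public final class DiagramDocExample {
}
```
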
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricBroadcaster.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricBroadcaster.java
index f2bca8d..2b32e94 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricBroadcaster.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricBroadcaster.java
@@ -33,13 +33,16 @@
 public final class MetricBroadcaster {
   private static final Logger LOG = LoggerFactory.getLogger(MetricBroadcaster.class.getName());
   private final Set<Session> sessions = ConcurrentHashMap.newKeySet();
+
   /**
    * Private constructor.
    */
-  private MetricBroadcaster() { }
+  private MetricBroadcaster() {
+  }
 
   /**
    * Getter for the singleton object.
+   *
    * @return MetricBroadcaster object.
    */
   public static MetricBroadcaster getInstance() {
@@ -55,6 +58,7 @@
 
   /**
    * Add a session to the session list.
+   *
    * @param session a WebSocket session.
    */
   public synchronized void addSession(final Session session) {
@@ -68,6 +72,7 @@
 
   /**
    * Remove a session from the session list.
+   *
    * @param session a WebSocket session.
    */
   public synchronized void removeSession(final Session session) {
@@ -76,6 +81,7 @@
 
   /**
    * Send text frame to each WebSocket session.
+   *
    * @param text text to send.
    */
   public void broadcast(final String text) {
@@ -90,6 +96,7 @@
 
   /**
    * Send binary frame to each WebSocket session.
+   *
    * @param bytes byte array to send.
    */
   public void broadcast(final byte[] bytes) {
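
MetricBroadcaster above keeps its WebSocket sessions in `ConcurrentHashMap.newKeySet()` and iterates over them to broadcast. A simplified, self-contained sketch of the same pattern, with plain `Consumer<String>` listeners standing in for sessions:

```java
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;

// Simplified sketch of MetricBroadcaster's pattern: a concurrent set of
// listeners that can be added, removed, and broadcast to from any thread.
final class Broadcaster {
  private final Set<Consumer<String>> listeners = ConcurrentHashMap.newKeySet();

  void addListener(final Consumer<String> listener) {
    listeners.add(listener);
  }

  void removeListener(final Consumer<String> listener) {
    listeners.remove(listener);
  }

  void broadcast(final String text) {
    // Iteration over the concurrent key set is weakly consistent, so
    // concurrent add/remove cannot throw ConcurrentModificationException.
    listeners.forEach(listener -> listener.accept(text));
  }
}
```
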
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricManagerMaster.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricManagerMaster.java
index 6eabcbe..0a4f2fc 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricManagerMaster.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricManagerMaster.java
@@ -18,17 +18,17 @@
  */
 package org.apache.nemo.runtime.master.metric;
 
-import javax.inject.Inject;
-
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.runtime.common.message.MessageEnvironment;
-import org.apache.nemo.runtime.master.scheduler.ExecutorRegistry;
 import org.apache.nemo.runtime.common.metric.Metric;
+import org.apache.nemo.runtime.master.scheduler.ExecutorRegistry;
 import org.apache.reef.annotations.audience.DriverSide;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.inject.Inject;
+
 /**
  * A default metric message handler.
  */
@@ -49,10 +49,10 @@
   public synchronized void sendMetricFlushRequest() {
     executorRegistry.viewExecutors(executors -> executors.forEach(executor -> {
       final ControlMessage.Message message = ControlMessage.Message.newBuilder()
-          .setId(RuntimeIdManager.generateMessageId())
-          .setListenerId(MessageEnvironment.EXECUTOR_MESSAGE_LISTENER_ID)
-          .setType(ControlMessage.MessageType.RequestMetricFlush)
-          .build();
+        .setId(RuntimeIdManager.generateMessageId())
+        .setListenerId(MessageEnvironment.EXECUTOR_MESSAGE_LISTENER_ID)
+        .setType(ControlMessage.MessageType.RequestMetricFlush)
+        .build();
       executor.sendControlMessage(message);
     }));
   }
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricMessageHandler.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricMessageHandler.java
index b0ab1b5..c375e1c 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricMessageHandler.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricMessageHandler.java
@@ -28,8 +28,9 @@
 
   /**
    * Handle the received metric message.
-   * @param metricType a given type for the metric (ex. TaskMetric).
-   * @param metricId  id of the metric.
+   *
+   * @param metricType  a given type for the metric (ex. TaskMetric).
+   * @param metricId    id of the metric.
    * @param metricField field name of the metric.
    * @param metricValue serialized metric data value.
    */
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricStore.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricStore.java
index 160e8bf..c660b28 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricStore.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricStore.java
@@ -30,10 +30,18 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.*;
+import java.io.BufferedWriter;
+import java.io.ByteArrayOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
 import java.lang.management.ManagementFactory;
-import java.sql.*;
-import java.util.*;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.NoSuchElementException;
 
 /**
  * MetricStore stores metric data which will be used by web visualize interface, logging, and so on.
@@ -44,6 +52,7 @@
   private final Map<Class<? extends Metric>, Map<String, Object>> metricMap = new HashMap<>();
   // You can add more metrics by adding item to this metricList list.
   private final Map<String, Class<? extends Metric>> metricList = new HashMap<>();
+
   /**
    * Private constructor.
    */
@@ -55,6 +64,7 @@
 
   /**
    * Getter for singleton instance.
+   *
    * @return MetricStore object.
    */
   public static MetricStore getStore() {
@@ -79,8 +89,9 @@
   /**
    * Store a metric object. Metric object should implement {@link Metric} interface.
    * This method will store a metric into a {@link Map}, which have metric's id as its key.
+   *
    * @param metric metric object.
-   * @param <T> class of metric
+   * @param <T>    class of metric
    */
   public <T extends Metric> void putMetric(final T metric) {
     final Class<? extends Metric> metricClass = metric.getClass();
@@ -93,9 +104,10 @@
 
   /**
    * Fetch metric by its metric class instance and its id.
+   *
    * @param metricClass class instance of metric.
-   * @param id metric id, which can be fetched by getPlanId() method.
-   * @param <T> class of metric
+   * @param id          metric id, which can be fetched by getPlanId() method.
+   * @param <T>         class of metric
    * @return a metric object.
    */
   public <T extends Metric> T getMetricWithId(final Class<T> metricClass, final String id) {
@@ -108,8 +120,9 @@
 
   /**
    * Fetch metric map by its metric class instance.
+   *
    * @param metricClass class instance of metric.
-   * @param <T> class of metric
+   * @param <T>         class of metric
    * @return a metric object.
    */
   public <T extends Metric> Map<String, Object> getMetricMap(final Class<T> metricClass) {
@@ -119,9 +132,10 @@
   /**
    * Same as getMetricWithId(), but if there is no such metric, it will try to create new metric object
    * using its constructor, which takes an id as a parameter.
+   *
    * @param metricClass class of metric.
-   * @param id metric id, which can be fetched by getPlanId() method.
-   * @param <T> class of metric
+   * @param id          metric id, which can be fetched by getPlanId() method.
+   * @param <T>         class of metric
    * @return a metric object. If there was no such metric, newly create one.
    */
   public <T extends Metric> T getOrCreateMetric(final Class<T> metricClass, final String id) {
@@ -152,8 +166,9 @@
 
   /**
    * Dumps JSON-serialized string of specific metric.
+   *
    * @param metricClass class of metric.
-   * @param <T> type of the metric to dump
+   * @param <T>         type of the metric to dump
    * @return dumped JSON string of all metric.
    * @throws IOException when failed to write json.
    */
@@ -179,6 +194,7 @@
 
   /**
    * Dumps JSON-serialized string of all stored metric.
+   *
    * @return dumped JSON string of all metric.
    * @throws IOException when failed to write file.
    */
@@ -207,6 +223,7 @@
 
   /**
    * Same as dumpAllMetricToJson(), but this will save it to the file.
+   *
    * @param filePath path to dump JSON.
    */
   public void dumpAllMetricToFile(final String filePath) {
@@ -257,7 +274,8 @@
 
   /**
    * Save the job metrics for the optimization to the DB, in the form of LibSVM.
-   * @param c the connection to the DB.
+   *
+   * @param c      the connection to the DB.
    * @param syntax the db-specific syntax.
    */
   private void saveOptimizationMetrics(final Connection c, final String[] syntax) {
@@ -287,10 +305,10 @@
 
         try {
           statement.executeUpdate("CREATE TABLE IF NOT EXISTS " + tableName
-              + " (id " + syntax[0] + ", duration INTEGER NOT NULL, inputsize INTEGER NOT NULL, "
-              + "jvmmemsize BIGINT NOT NULL, memsize BIGINT NOT NULL, "
-              + "vertex_properties TEXT NOT NULL, edge_properties TEXT NOT NULL, "
-              + "created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP);");
+            + " (id " + syntax[0] + ", duration INTEGER NOT NULL, inputsize INTEGER NOT NULL, "
+            + "jvmmemsize BIGINT NOT NULL, memsize BIGINT NOT NULL, "
+            + "vertex_properties TEXT NOT NULL, edge_properties TEXT NOT NULL, "
+            + "created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP);");
           LOG.info("CREATED TABLE For {} IF NOT PRESENT", tableName);
 
           statement.executeUpdate("INSERT INTO " + tableName
@@ -312,9 +330,10 @@
    * Send changed metric data to {@link MetricBroadcaster}, which will broadcast it to
    * all active WebSocket sessions. This method should be called manually if you want to
    * send changed metric data to the frontend client. Also this method is synchronized.
+   *
    * @param metricClass class of the metric.
-   * @param id id of the metric.
-   * @param <T> type of the metric to broadcast
+   * @param id          id of the metric.
+   * @param <T>         type of the metric to broadcast
    */
   public synchronized <T extends Metric> void triggerBroadcast(final Class<T> metricClass, final String id) {
     final MetricBroadcaster metricBroadcaster = MetricBroadcaster.getInstance();
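
The MetricStore javadoc above describes a two-level lookup (metric class, then metric id) and a `getOrCreateMetric()` that falls back to a constructor taking the id. For readers skimming the reformatted hunks, here is a minimal, self-contained sketch of that pattern; `SimpleMetricStore` and its method shapes are illustrative stand-ins, not Nemo's actual class:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.NoSuchElementException;

final class SimpleMetricStore {
  // Two-level map: metric class -> (metric id -> metric object).
  private final Map<Class<?>, Map<String, Object>> metricMap = new HashMap<>();

  <T> void putMetric(final Class<T> metricClass, final String id, final T metric) {
    metricMap.computeIfAbsent(metricClass, c -> new HashMap<>()).put(id, metric);
  }

  <T> T getMetricWithId(final Class<T> metricClass, final String id) {
    final Object metric = metricMap.getOrDefault(metricClass, Map.of()).get(id);
    if (metric == null) {
      throw new NoSuchElementException("No metric " + id + " for " + metricClass);
    }
    return metricClass.cast(metric);
  }

  <T> T getOrCreateMetric(final Class<T> metricClass, final String id) {
    final Object metric = metricMap.computeIfAbsent(metricClass, c -> new HashMap<>())
      .computeIfAbsent(id, i -> {
        try {
          // Mirrors the documented fallback: a constructor that takes the id.
          return metricClass.getConstructor(String.class).newInstance(i);
        } catch (final ReflectiveOperationException e) {
          throw new RuntimeException(e);
        }
      });
    return metricClass.cast(metric);
  }
}
```
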
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/resource/ContainerManager.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/resource/ContainerManager.java
index 0f8ed01..0522b02 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/resource/ContainerManager.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/resource/ContainerManager.java
@@ -47,7 +47,7 @@
  * (WARNING) This class is not thread-safe.
  * Only a single thread should use the methods of this class.
  * (i.e., runtimeMasterThread in RuntimeMaster)
- *
+ * <p>
  * Encapsulates REEF's evaluator management for containers.
  * Serves as a single point of container management in Runtime.
  * We define a unit of resource a container (an evaluator in REEF), and launch a single executor on each container.
@@ -96,7 +96,8 @@
 
   /**
    * Requests containers/evaluators with the given specifications.
-   * @param numToRequest number of containers to request
+   *
+   * @param numToRequest          number of containers to request
   * @param resourceSpecification containing the specifications of the containers to request.
    */
   public void requestContainer(final int numToRequest, final ResourceSpecification resourceSpecification) {
@@ -115,17 +116,17 @@
       // Mark the request as pending with the given specifications.
       pendingContainerRequestsByContainerType.putIfAbsent(resourceSpecification.getContainerType(), new ArrayList<>());
       pendingContainerRequestsByContainerType.get(resourceSpecification.getContainerType())
-          .addAll(resourceSpecificationList);
+        .addAll(resourceSpecificationList);
 
       requestLatchByResourceSpecId.put(resourceSpecification.getResourceSpecId(),
-          new CountDownLatch(numToRequest));
+        new CountDownLatch(numToRequest));
 
       // Request the evaluators
       evaluatorRequestor.submit(EvaluatorRequest.newBuilder()
-          .setNumber(numToRequest)
-          .setMemory(resourceSpecification.getMemory())
-          .setNumberOfCores(resourceSpecification.getCapacity())
-          .build());
+        .setNumber(numToRequest)
+        .setMemory(resourceSpecification.getMemory())
+        .setNumberOfCores(resourceSpecification.getCapacity())
+        .build());
     } else {
       LOG.info("Request {} containers", numToRequest);
     }
@@ -133,8 +134,9 @@
 
   /**
   * Take the necessary actions in the container manager once a container is allocated.
-   * @param executorId of the executor to launch on this container.
-   * @param allocatedContainer the allocated container.
+   *
+   * @param executorId            of the executor to launch on this container.
+   * @param allocatedContainer    the allocated container.
    * @param executorConfiguration executor related configuration.
    */
   public void onContainerAllocated(final String executorId,
@@ -150,13 +152,13 @@
     evaluatorIdToResourceSpec.put(allocatedContainer.getId(), resourceSpecification);
 
     LOG.info("Container type (" + resourceSpecification.getContainerType()
-        + ") allocated, will be used for [" + executorId + "]");
+      + ") allocated, will be used for [" + executorId + "]");
     pendingContextIdToResourceSpec.put(executorId, resourceSpecification);
 
     // Poison handling
     final Configuration poisonConfiguration = Tang.Factory.getTang().newConfigurationBuilder()
-        .bindNamedParameter(JobConf.ExecutorPosionSec.class, String.valueOf(resourceSpecification.getPoisonSec()))
-        .build();
+      .bindNamedParameter(JobConf.ExecutorPosionSec.class, String.valueOf(resourceSpecification.getPoisonSec()))
+      .build();
 
     allocatedContainer.submitContext(Configurations.merge(executorConfiguration, poisonConfiguration));
   }
@@ -183,7 +185,7 @@
     MessageSender messageSender;
     try {
       messageSender =
-          messageEnvironment.asyncConnect(executorId, MessageEnvironment.EXECUTOR_MESSAGE_LISTENER_ID).get();
+        messageEnvironment.asyncConnect(executorId, MessageEnvironment.EXECUTOR_MESSAGE_LISTENER_ID).get();
     } catch (final InterruptedException | ExecutionException e) {
       // TODO #140: Properly classify and handle each RPC failure
       messageSender = new FailedMessageSender();
@@ -191,8 +193,8 @@
 
     // Create the executor representation.
     final ExecutorRepresenter executorRepresenter =
-        new ExecutorRepresenter(executorId, resourceSpec, messageSender, activeContext, serializationExecutorService,
-            activeContext.getEvaluatorDescriptor().getNodeDescriptor().getName());
+      new ExecutorRepresenter(executorId, resourceSpec, messageSender, activeContext, serializationExecutorService,
+        activeContext.getEvaluatorDescriptor().getNodeDescriptor().getName());
 
     requestLatchByResourceSpecId.get(resourceSpec.getResourceSpecId()).countDown();
 
@@ -201,6 +203,7 @@
 
   /**
    * Re-acquire a new container using the failed container's resource spec.
+   *
    * @param failedEvaluatorId of the failed evaluator
    * @return the resource specification of the failed evaluator
    */
@@ -223,12 +226,13 @@
   /**
    * Selects an executor specification for the executor to be launched on a container.
    * Important! This is a "hack" to get around the inability to mark evaluators with Node Labels in REEF.
+   *
    * @return the selected executor specification.
    */
   private ResourceSpecification selectResourceSpecForContainer() {
     ResourceSpecification selectedResourceSpec = null;
     for (final Map.Entry<String, List<ResourceSpecification>> entry
-        : pendingContainerRequestsByContainerType.entrySet()) {
+      : pendingContainerRequestsByContainerType.entrySet()) {
       if (entry.getValue().size() > 0) {
         selectedResourceSpec = entry.getValue().remove(0);
         break;
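
The ContainerManager hunks above show the bookkeeping this class keeps while waiting for REEF: pending resource specs are queued per container type, a `CountDownLatch` per resource-spec id counts launched containers, and `selectResourceSpecForContainer()` pops the first pending spec. A simplified sketch of that pattern, with String stand-ins for Nemo's spec and container types:

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;

final class PendingContainerBook {
  private final Map<String, List<String>> pendingByType = new HashMap<>();
  private final Map<String, CountDownLatch> latchBySpecId = new HashMap<>();

  void request(final String containerType, final String specId, final int numToRequest) {
    final List<String> pending = pendingByType.computeIfAbsent(containerType, t -> new ArrayList<>());
    for (int i = 0; i < numToRequest; i++) {
      pending.add(specId);
    }
    latchBySpecId.put(specId, new CountDownLatch(numToRequest));
  }

  // Mirrors selectResourceSpecForContainer(): pop the first pending spec found.
  String selectSpecForAllocatedContainer() {
    for (final List<String> pending : pendingByType.values()) {
      if (!pending.isEmpty()) {
        return pending.remove(0);
      }
    }
    return null;
  }

  void onContainerLaunched(final String specId) {
    latchBySpecId.get(specId).countDown();
  }
}
```
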
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/resource/ExecutorRepresenter.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/resource/ExecutorRepresenter.java
index bf6279c..dd7031b 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/resource/ExecutorRepresenter.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/resource/ExecutorRepresenter.java
@@ -21,33 +21,36 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.protobuf.ByteString;
+import org.apache.commons.lang3.SerializationUtils;
 import org.apache.nemo.common.ir.vertex.executionproperty.ResourceSlotProperty;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.runtime.common.message.MessageEnvironment;
 import org.apache.nemo.runtime.common.message.MessageSender;
 import org.apache.nemo.runtime.common.plan.Task;
-import org.apache.commons.lang3.SerializationUtils;
 import org.apache.reef.driver.context.ActiveContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import javax.annotation.concurrent.NotThreadSafe;
-import java.util.*;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
 /**
  * (WARNING) This class is not thread-safe, and thus should only be accessed through ExecutorRegistry.
- *
+ * <p>
  * Contains information/state regarding an executor.
  * Such information may include:
- *    a) The executor's resource type.
- *    b) The executor's capacity (ex. number of cores).
- *    c) Tasks scheduled/launched for the executor.
- *    d) Name of the physical node which hosts this executor.
- *    e) (Please add other information as we implement more features).
+ * a) The executor's resource type.
+ * b) The executor's capacity (ex. number of cores).
+ * c) Tasks scheduled/launched for the executor.
+ * d) Name of the physical node which hosts this executor.
+ * e) (Please add other information as we implement more features).
  */
 @NotThreadSafe
 public final class ExecutorRepresenter {
@@ -67,12 +70,13 @@
 
   /**
    * Creates a reference to the specified executor.
-   * @param executorId the executor id
-   * @param resourceSpecification specification for the executor
-   * @param messageSender provides communication context for this executor
-   * @param activeContext context on the corresponding REEF evaluator
+   *
+   * @param executorId                   the executor id
+   * @param resourceSpecification        specification for the executor
+   * @param messageSender                provides communication context for this executor
+   * @param activeContext                context on the corresponding REEF evaluator
    * @param serializationExecutorService provides threads for message serialization
-   * @param nodeName physical name of the node where this executor resides
+   * @param nodeName                     physical name of the node where this executor resides
    */
   public ExecutorRepresenter(final String executorId,
                              final ResourceSpecification resourceSpecification,
@@ -102,7 +106,7 @@
     failedTasks.addAll(runningComplyingTasks.values());
     failedTasks.addAll(runningNonComplyingTasks.values());
     final Set<String> taskIds = Stream.concat(runningComplyingTasks.keySet().stream(),
-        runningNonComplyingTasks.keySet().stream()).collect(Collectors.toSet());
+      runningNonComplyingTasks.keySet().stream()).collect(Collectors.toSet());
     runningComplyingTasks.clear();
     runningNonComplyingTasks.clear();
     return taskIds;
@@ -110,11 +114,12 @@
 
   /**
    * Marks the Task as running, and sends scheduling message to the executor.
+   *
    * @param task the task to run
    */
   public void onTaskScheduled(final Task task) {
     (task.getPropertyValue(ResourceSlotProperty.class).orElse(true)
-        ? runningComplyingTasks : runningNonComplyingTasks).put(task.getTaskId(), task);
+      ? runningComplyingTasks : runningNonComplyingTasks).put(task.getTaskId(), task);
     runningTaskToAttempt.put(task, task.getAttemptIdx());
     failedTasks.remove(task);
 
@@ -136,6 +141,7 @@
 
   /**
    * Sends control message to the executor.
+   *
    * @param message Message object to send
    */
   public void sendControlMessage(final ControlMessage.Message message) {
@@ -144,6 +150,7 @@
 
   /**
    * Marks the specified Task as completed.
+   *
    * @param taskId id of the completed task
    */
   public void onTaskExecutionComplete(final String taskId) {
@@ -154,6 +161,7 @@
 
   /**
    * Marks the specified Task as failed.
+   *
    * @param taskId id of the Task
    */
   public void onTaskExecutionFailed(final String taskId) {
@@ -174,7 +182,7 @@
    */
   public Set<Task> getRunningTasks() {
     return Stream.concat(runningComplyingTasks.values().stream(),
-        runningNonComplyingTasks.values().stream()).collect(Collectors.toSet());
+      runningNonComplyingTasks.values().stream()).collect(Collectors.toSet());
   }
 
   /**
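
The ExecutorRepresenter hunks show tasks being routed into one of two maps, `runningComplyingTasks` or `runningNonComplyingTasks`, depending on `ResourceSlotProperty` (defaulting to complying via `orElse(true)`), and queried together via concatenated streams. A self-contained sketch of the same two-map bookkeeping; `SimpleTask` is a hypothetical stand-in for Nemo's `Task`:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

final class RunningTaskBook {
  static final class SimpleTask {
    final String id;
    final Optional<Boolean> resourceSlotCompliance; // plays the role of ResourceSlotProperty

    SimpleTask(final String id, final Optional<Boolean> compliance) {
      this.id = id;
      this.resourceSlotCompliance = compliance;
    }
  }

  private final Map<String, SimpleTask> runningComplying = new HashMap<>();
  private final Map<String, SimpleTask> runningNonComplying = new HashMap<>();

  void onTaskScheduled(final SimpleTask task) {
    // An absent property defaults to "complying", like orElse(true) in the diff.
    (task.resourceSlotCompliance.orElse(true) ? runningComplying : runningNonComplying)
      .put(task.id, task);
  }

  Set<String> runningTaskIds() {
    return Stream.concat(runningComplying.keySet().stream(), runningNonComplying.keySet().stream())
      .collect(Collectors.toSet());
  }
}
```
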
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/resource/ResourceSpecification.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/resource/ResourceSpecification.java
index 58fe251..6241165 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/resource/ResourceSpecification.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/resource/ResourceSpecification.java
@@ -75,7 +75,7 @@
 
   /**
    * @return -1   if this resource is not poisoned. (for all other normal cases)
-   *         &gt;= 0 the expected time to failure by poison. (for fault-handling tests)
+   * &gt;= 0 the expected time to failure by poison. (for fault-handling tests)
    */
   public int getPoisonSec() {
     return poisonSec;
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/BatchScheduler.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/BatchScheduler.java
index 6c633e5..b812136 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/BatchScheduler.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/BatchScheduler.java
@@ -19,8 +19,12 @@
 package org.apache.nemo.runtime.master.scheduler;
 
 import com.google.common.collect.Sets;
+import org.apache.commons.lang.mutable.MutableBoolean;
 import org.apache.nemo.common.Pair;
 import org.apache.nemo.common.dag.DAG;
+import org.apache.nemo.common.exception.UnknownExecutionStateException;
+import org.apache.nemo.common.exception.UnknownFailureCauseException;
+import org.apache.nemo.common.exception.UnrecoverableFailureException;
 import org.apache.nemo.common.ir.Readable;
 import org.apache.nemo.common.ir.edge.executionproperty.MessageIdEdgeProperty;
 import org.apache.nemo.common.ir.vertex.executionproperty.ClonedSchedulingProperty;
@@ -29,13 +33,14 @@
 import org.apache.nemo.runtime.common.RuntimeIdManager;
 import org.apache.nemo.runtime.common.plan.*;
 import org.apache.nemo.runtime.common.state.BlockState;
-import org.apache.nemo.runtime.common.state.TaskState;
-import org.apache.nemo.runtime.master.*;
-import org.apache.nemo.common.exception.*;
 import org.apache.nemo.runtime.common.state.StageState;
+import org.apache.nemo.runtime.common.state.TaskState;
+import org.apache.nemo.runtime.master.BlockManagerMaster;
+import org.apache.nemo.runtime.master.PlanAppender;
+import org.apache.nemo.runtime.master.PlanStateManager;
 import org.apache.nemo.runtime.master.resource.ExecutorRepresenter;
-import org.apache.commons.lang.mutable.MutableBoolean;
 import org.apache.reef.annotations.audience.DriverSide;
+import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import javax.annotation.Nullable;
@@ -45,12 +50,10 @@
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
-import org.slf4j.Logger;
-
 /**
  * (CONCURRENCY) Only a single dedicated thread should use the public methods of this class.
  * (i.e., runtimeMasterThread in RuntimeMaster)
- *
+ * <p>
  * BatchScheduler receives a single {@link PhysicalPlan} to execute and schedules the Tasks.
  */
 @DriverSide
@@ -125,7 +128,7 @@
 
   /**
    * @param taskId that generated the message.
-   * @param data of the message.
+   * @param data   of the message.
    */
   public void onRunTimePassMessage(final String taskId, final Object data) {
     final Set<StageEdge> targetEdges = getEdgesToOptimize(taskId);
@@ -135,8 +138,8 @@
   /**
    * Action for after task execution is put on hold.
    *
-   * @param executorId       the ID of the executor.
-   * @param taskId           the ID of the task.
+   * @param executorId the ID of the executor.
+   * @param taskId     the ID of the task.
    */
   private void onTaskExecutionOnHold(final String executorId,
                                      final String taskId) {
@@ -466,7 +469,7 @@
    * Get the target edges of dynamic optimization.
    * The edges are annotated with {@link MessageIdEdgeProperty}, which are outgoing edges of
    * parents of the stage put on hold.
-   *
+   * <p>
    * See {@link org.apache.nemo.compiler.optimizer.pass.compiletime.reshaping.SkewReshapingPass}
    * for setting the target edges of dynamic optimization.
    *
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/ContainerTypeAwareSchedulingConstraint.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/ContainerTypeAwareSchedulingConstraint.java
index 075f082..7a46d27 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/ContainerTypeAwareSchedulingConstraint.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/ContainerTypeAwareSchedulingConstraint.java
@@ -38,8 +38,8 @@
   @Override
   public boolean testSchedulability(final ExecutorRepresenter executor, final Task task) {
     final String executorPlacementPropertyValue = task.getPropertyValue(ResourcePriorityProperty.class)
-        .orElse(ResourcePriorityProperty.NONE);
+      .orElse(ResourcePriorityProperty.NONE);
     return executorPlacementPropertyValue.equals(ResourcePriorityProperty.NONE) ? true
-        : executor.getContainerType().equals(executorPlacementPropertyValue);
+      : executor.getContainerType().equals(executorPlacementPropertyValue);
   }
 }
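
The full constraint body is visible in the hunk above: a task with no placement priority (`ResourcePriorityProperty.NONE`) may run anywhere; otherwise the executor's container type must match. As an aside, the `? true : ...` ternary kept by the reformat is equivalent to a plain `||`, as in this simplified String-based restatement:

```java
final class ContainerTypeCheck {
  // A task without a placement priority ("NONE") may run on any executor;
  // otherwise the executor's container type must match the requested one.
  static boolean testSchedulability(final String executorContainerType, final String taskPlacement) {
    return "NONE".equals(taskPlacement) || executorContainerType.equals(taskPlacement);
  }
}
```
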
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/ExecutorRegistry.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/ExecutorRegistry.java
index cb8fc25..8395f01 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/ExecutorRegistry.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/ExecutorRegistry.java
@@ -26,7 +26,10 @@
 
 import javax.annotation.concurrent.ThreadSafe;
 import javax.inject.Inject;
-import java.util.*;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
 import java.util.function.BiFunction;
 import java.util.function.Consumer;
 import java.util.stream.Collectors;
@@ -68,8 +71,8 @@
   }
 
   synchronized void updateExecutor(
-      final String executorId,
-      final BiFunction<ExecutorRepresenter, ExecutorState, Pair<ExecutorRepresenter, ExecutorState>> updater) {
+    final String executorId,
+    final BiFunction<ExecutorRepresenter, ExecutorState, Pair<ExecutorRepresenter, ExecutorState>> updater) {
     final Pair<ExecutorRepresenter, ExecutorState> pair = executors.get(executorId);
     if (pair == null) {
       throw new IllegalArgumentException("Unknown executor id " + executorId);
@@ -87,6 +90,7 @@
 
   /**
    * Retrieves the executor to which the given task was scheduled.
+   *
    * @param taskId of the task to search.
    * @return the {@link ExecutorRepresenter} of the executor the task was scheduled to.
    */
@@ -104,10 +108,10 @@
 
   private Set<ExecutorRepresenter> getRunningExecutors() {
     return executors.values()
-        .stream()
-        .filter(pair -> pair.right().equals(ExecutorState.RUNNING))
-        .map(Pair::left)
-        .collect(Collectors.toSet());
+      .stream()
+      .filter(pair -> pair.right().equals(ExecutorState.RUNNING))
+      .map(Pair::left)
+      .collect(Collectors.toSet());
   }
 
   @Override
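
The ExecutorRegistry hunks show its core pattern: an executor's `(representer, state)` pair is replaced in place by a caller-supplied `BiFunction` updater, and running executors are selected by filtering on state. A minimal sketch under simplified types (the representer is reduced to `Object` and the state to a `String`):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.stream.Collectors;

final class SimpleExecutorRegistry {
  private final Map<String, Map.Entry<Object, String>> executors = new HashMap<>();

  synchronized void addExecutor(final String executorId, final Object representer) {
    executors.put(executorId, Map.entry(representer, "RUNNING"));
  }

  synchronized void updateExecutor(
    final String executorId,
    final BiFunction<Object, String, Map.Entry<Object, String>> updater) {
    final Map.Entry<Object, String> pair = executors.get(executorId);
    if (pair == null) {
      throw new IllegalArgumentException("Unknown executor id " + executorId);
    }
    executors.put(executorId, updater.apply(pair.getKey(), pair.getValue()));
  }

  synchronized Set<Object> getRunningExecutors() {
    return executors.values().stream()
      .filter(pair -> "RUNNING".equals(pair.getValue()))
      .map(Map.Entry::getKey)
      .collect(Collectors.toSet());
  }
}
```
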
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/MinOccupancyFirstSchedulingPolicy.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/MinOccupancyFirstSchedulingPolicy.java
index 02860f3..b416445 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/MinOccupancyFirstSchedulingPolicy.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/MinOccupancyFirstSchedulingPolicy.java
@@ -24,7 +24,8 @@
 
 import javax.annotation.concurrent.ThreadSafe;
 import javax.inject.Inject;
-import java.util.*;
+import java.util.Collection;
+import java.util.OptionalInt;
 
 /**
  * This policy chooses the Executor that has the minimum number of running Tasks.
@@ -40,7 +41,7 @@
   @Override
   public ExecutorRepresenter selectExecutor(final Collection<ExecutorRepresenter> executors, final Task task) {
     final OptionalInt minOccupancy =
-        executors.stream()
+      executors.stream()
         .map(executor -> executor.getNumOfRunningTasks())
         .mapToInt(i -> i).min();
 
@@ -49,8 +50,8 @@
     }
 
     return executors.stream()
-        .filter(executor -> executor.getNumOfRunningTasks() == minOccupancy.getAsInt())
-        .findFirst()
-        .orElseThrow(() -> new RuntimeException("No such executor"));
+      .filter(executor -> executor.getNumOfRunningTasks() == minOccupancy.getAsInt())
+      .findFirst()
+      .orElseThrow(() -> new RuntimeException("No such executor"));
   }
 }
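
The policy above computes the minimum occupancy first and then filters for an executor matching it. For reference, the same selection can be expressed in a single pass with `Comparator.comparingInt`; this is only an equivalent sketch (executors reduced to an int-valued occupancy function), not Nemo's implementation:

```java
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.function.ToIntFunction;

final class MinOccupancyFirst {
  static <E> E selectExecutor(final Collection<E> executors, final ToIntFunction<E> occupancy) {
    return executors.stream()
      .min(Comparator.comparingInt(occupancy))
      .orElseThrow(() -> new RuntimeException("No such executor"));
  }

  public static void main(final String[] args) {
    // Occupancies 3, 1, 2: the executor with occupancy 1 wins.
    System.out.println(selectExecutor(List.of(3, 1, 2), i -> i)); // prints 1
  }
}
```
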
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/NodeShareSchedulingConstraint.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/NodeShareSchedulingConstraint.java
index a34dc89..10fb8de 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/NodeShareSchedulingConstraint.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/NodeShareSchedulingConstraint.java
@@ -54,13 +54,13 @@
   @Override
   public boolean testSchedulability(final ExecutorRepresenter executor, final Task task) {
     final Map<String, Integer> propertyValue = task.getPropertyValue(ResourceSiteProperty.class)
-        .orElseThrow(() -> new RuntimeException("ResourceSiteProperty expected"));
+      .orElseThrow(() -> new RuntimeException("ResourceSiteProperty expected"));
     if (propertyValue.isEmpty()) {
       return true;
     }
     try {
       return executor.getNodeName().equals(
-          getNodeName(propertyValue, RuntimeIdManager.getIndexFromTaskId(task.getTaskId())));
+        getNodeName(propertyValue, RuntimeIdManager.getIndexFromTaskId(task.getTaskId())));
     } catch (final IllegalStateException e) {
      throw new RuntimeException(String.format("Cannot schedule %s", task.getTaskId()), e);
     }
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/PendingTaskCollectionPointer.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/PendingTaskCollectionPointer.java
index af34463..575e9a6 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/PendingTaskCollectionPointer.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/PendingTaskCollectionPointer.java
@@ -18,9 +18,8 @@
  */
 package org.apache.nemo.runtime.master.scheduler;
 
-import org.apache.nemo.runtime.common.plan.Task;
-
 import net.jcip.annotations.ThreadSafe;
+import org.apache.nemo.runtime.common.plan.Task;
 
 import javax.inject.Inject;
 import java.util.Collection;
@@ -41,6 +40,7 @@
 
   /**
    * This collection of tasks should take precedence over any previous collection of tasks.
+   *
    * @param newCollection to schedule.
    */
   synchronized void setToOverwrite(final Collection<Task> newCollection) {
@@ -49,6 +49,7 @@
 
   /**
    * This collection of tasks can be scheduled only if there's no collection of tasks to schedule at the moment.
+   *
    * @param newCollection to schedule
    */
   synchronized void setIfNull(final Collection<Task> newCollection) {
@@ -59,6 +60,7 @@
 
   /**
    * Take the whole collection of tasks to schedule, and set the pointer to null.
+   *
    * @return optional tasks to schedule
    */
   synchronized Optional<Collection<Task>> getAndSetNull() {
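
The javadoc in this hunk fully specifies the pointer's semantics: `setToOverwrite` always wins, `setIfNull` only fills an empty slot, and taking the collection clears it. A self-contained sketch of exactly those semantics, generic over the task type:

```java
import java.util.Collection;
import java.util.Optional;

final class SimplePendingPointer<T> {
  private Collection<T> collection = null;

  // This collection takes precedence over any previously set collection.
  synchronized void setToOverwrite(final Collection<T> newCollection) {
    this.collection = newCollection;
  }

  // Only set if there is currently nothing to schedule.
  synchronized void setIfNull(final Collection<T> newCollection) {
    if (this.collection == null) {
      this.collection = newCollection;
    }
  }

  // Take the whole collection and reset the pointer to null.
  synchronized Optional<Collection<T>> getAndSetNull() {
    final Collection<T> current = collection;
    collection = null;
    return Optional.ofNullable(current);
  }
}
```
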
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/Scheduler.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/Scheduler.java
index b8d00b3..cc4661d 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/Scheduler.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/Scheduler.java
@@ -70,7 +70,7 @@
    * @param taskId        of the Task whose state must be updated.
    * @param newState      for the Task.
    * @param attemptIdx    the number of times this Task has executed.
-   *************** the below parameters are only valid for failures *****************
+   *                      ************** the below parameters are only valid for failures *****************
    * @param taskPutOnHold the ID of task that are put on hold. It is null otherwise.
    * @param failureCause  for which the Task failed in the case of a recoverable failure.
    */
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingConstraintRegistry.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingConstraintRegistry.java
index d58e036..277dd50 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingConstraintRegistry.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingConstraintRegistry.java
@@ -40,11 +40,11 @@
 
   @Inject
   private SchedulingConstraintRegistry(
-      final ContainerTypeAwareSchedulingConstraint containerTypeAwareSchedulingConstraint,
-      final FreeSlotSchedulingConstraint freeSlotSchedulingConstraint,
-      final LocalitySchedulingConstraint localitySchedulingConstraint,
-      final AntiAffinitySchedulingConstraint antiAffinitySchedulingConstraint,
-      final NodeShareSchedulingConstraint nodeShareSchedulingConstraint) {
+    final ContainerTypeAwareSchedulingConstraint containerTypeAwareSchedulingConstraint,
+    final FreeSlotSchedulingConstraint freeSlotSchedulingConstraint,
+    final LocalitySchedulingConstraint localitySchedulingConstraint,
+    final AntiAffinitySchedulingConstraint antiAffinitySchedulingConstraint,
+    final NodeShareSchedulingConstraint nodeShareSchedulingConstraint) {
     registerSchedulingConstraint(containerTypeAwareSchedulingConstraint);
     registerSchedulingConstraint(freeSlotSchedulingConstraint);
     registerSchedulingConstraint(localitySchedulingConstraint);
@@ -54,26 +54,28 @@
 
   /**
    * Registers a {@link SchedulingConstraint}.
+   *
    * @param policy the policy to register
    */
   public void registerSchedulingConstraint(final SchedulingConstraint policy) {
     final AssociatedProperty associatedProperty = policy.getClass().getAnnotation(AssociatedProperty.class);
     if (associatedProperty == null || associatedProperty.value() == null) {
       throw new RuntimeException(String.format("SchedulingConstraint %s has no associated VertexExecutionProperty",
-          policy.getClass()));
+        policy.getClass()));
     }
     final Class<? extends ExecutionProperty> property = associatedProperty.value();
     if (typeToSchedulingConstraintMap.putIfAbsent(property, policy) != null) {
       throw new RuntimeException(String.format("Multiple SchedulingConstraint for VertexExecutionProperty %s:"
-          + "%s, %s", property, typeToSchedulingConstraintMap.get(property), policy));
+        + "%s, %s", property, typeToSchedulingConstraintMap.get(property), policy));
     }
   }
 
   /**
    * Returns {@link SchedulingConstraint} for the given {@link VertexExecutionProperty}.
+   *
    * @param propertyClass {@link VertexExecutionProperty} class
    * @return the corresponding {@link SchedulingConstraint} object,
-   *         or {@link Optional#EMPTY} if no such policy was found
+   * or {@link Optional#EMPTY} if no such policy was found
    */
   public Optional<SchedulingConstraint> get(final Class<? extends VertexExecutionProperty> propertyClass) {
     return Optional.ofNullable(typeToSchedulingConstraintMap.get(propertyClass));
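
The registry above keys constraints by the execution property named in each constraint's `AssociatedProperty` annotation, rejecting unannotated constraints and duplicates via `putIfAbsent`. A compact sketch of that annotation-keyed registration pattern; `Associated` and the `Object`-typed constraint are simplified stand-ins for Nemo's types:

```java
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

final class SimpleConstraintRegistry {
  @Retention(RetentionPolicy.RUNTIME)
  @interface Associated {
    Class<?> value();
  }

  private final Map<Class<?>, Object> byProperty = new HashMap<>();

  void register(final Object constraint) {
    final Associated associated = constraint.getClass().getAnnotation(Associated.class);
    if (associated == null) {
      throw new RuntimeException(constraint.getClass() + " has no associated property");
    }
    // putIfAbsent returns the previous value, so non-null means a duplicate.
    if (byProperty.putIfAbsent(associated.value(), constraint) != null) {
      throw new RuntimeException("Multiple constraints for " + associated.value());
    }
  }

  Optional<Object> get(final Class<?> propertyClass) {
    return Optional.ofNullable(byProperty.get(propertyClass));
  }
}
```
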
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingPolicy.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingPolicy.java
index 0d0d3ca..76afbee 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingPolicy.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingPolicy.java
@@ -18,9 +18,9 @@
  */
 package org.apache.nemo.runtime.master.scheduler;
 
+import net.jcip.annotations.ThreadSafe;
 import org.apache.nemo.runtime.common.plan.Task;
 import org.apache.nemo.runtime.master.resource.ExecutorRepresenter;
-import net.jcip.annotations.ThreadSafe;
 import org.apache.reef.annotations.audience.DriverSide;
 import org.apache.reef.tang.annotations.DefaultImplementation;
 
@@ -39,7 +39,7 @@
    *
    * @param executors The collection of available executors.
    *                  Implementations can assume that the collection is not empty.
-   * @param task The task to schedule
+   * @param task      The task to schedule
    * @return The selected executor. It must be a member of {@code executors}.
    */
   ExecutorRepresenter selectExecutor(final Collection<ExecutorRepresenter> executors, final Task task);
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/StreamingScheduler.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/StreamingScheduler.java
index 71f1da2..24e30be 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/StreamingScheduler.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/StreamingScheduler.java
@@ -26,7 +26,8 @@
 import org.apache.nemo.runtime.common.plan.StageEdge;
 import org.apache.nemo.runtime.common.plan.Task;
 import org.apache.nemo.runtime.common.state.TaskState;
-import org.apache.nemo.runtime.master.*;
+import org.apache.nemo.runtime.master.PipeManagerMaster;
+import org.apache.nemo.runtime.master.PlanStateManager;
 import org.apache.nemo.runtime.master.resource.ExecutorRepresenter;
 import org.apache.reef.annotations.audience.DriverSide;
 import org.slf4j.Logger;
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/TaskDispatcher.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/TaskDispatcher.java
index c50f1e9..780e3ca 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/TaskDispatcher.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/TaskDispatcher.java
@@ -18,13 +18,17 @@
  */
 package org.apache.nemo.runtime.master.scheduler;
 
+import org.apache.commons.lang3.mutable.MutableObject;
 import org.apache.nemo.runtime.common.plan.Task;
 import org.apache.nemo.runtime.common.state.TaskState;
 import org.apache.nemo.runtime.master.PlanStateManager;
 import org.apache.nemo.runtime.master.resource.ExecutorRepresenter;
-import org.apache.commons.lang3.mutable.MutableObject;
 import org.apache.reef.annotations.audience.DriverSide;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
+import javax.annotation.concurrent.NotThreadSafe;
+import javax.inject.Inject;
 import java.util.*;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -33,12 +37,6 @@
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.stream.Collectors;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.annotation.concurrent.NotThreadSafe;
-import javax.inject.Inject;
-
 /**
  * Dispatches tasks to executors in discrete batches (dispatch iterations).
  * A dispatch iteration occurs under one of the following conditions
@@ -68,7 +66,7 @@
                          final PlanStateManager planStateManager) {
     this.pendingTaskCollectionPointer = pendingTaskCollectionPointer;
     this.dispatcherThread = Executors.newSingleThreadExecutor(runnable ->
-        new Thread(runnable, "TaskDispatcher thread"));
+      new Thread(runnable, "TaskDispatcher thread"));
     this.planStateManager = planStateManager;
     this.isSchedulerRunning = false;
     this.isTerminated = false;
@@ -121,14 +119,14 @@
           final Optional<SchedulingConstraint> constraint = schedulingConstraintRegistry.get(property.getClass());
           if (constraint.isPresent() && !candidateExecutors.getValue().isEmpty()) {
             candidateExecutors.setValue(candidateExecutors.getValue().stream()
-                .filter(e -> constraint.get().testSchedulability(e, task))
-                .collect(Collectors.toSet()));
+              .filter(e -> constraint.get().testSchedulability(e, task))
+              .collect(Collectors.toSet()));
           }
         });
         if (!candidateExecutors.getValue().isEmpty()) {
           // Select executor
           final ExecutorRepresenter selectedExecutor
-              = schedulingPolicy.selectExecutor(candidateExecutors.getValue(), task);
+            = schedulingPolicy.selectExecutor(candidateExecutors.getValue(), task);
           // update metadata first
           planStateManager.onTaskStateChanged(task.getTaskId(), TaskState.State.EXECUTING);
 
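
The dispatch loop visible in this hunk follows a filter-then-select pipeline: each applicable scheduling constraint narrows the candidate executor set, and the scheduling policy picks one executor from whatever remains. A stripped-down sketch of that control flow, with executors and tasks reduced to Strings and constraints to `BiPredicate`s:

```java
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.BiPredicate;
import java.util.stream.Collectors;

final class DispatchSketch {
  static Optional<String> dispatch(
    final Set<String> executors,
    final String task,
    final List<BiPredicate<String, String>> constraints) {
    Set<String> candidates = executors;
    for (final BiPredicate<String, String> constraint : constraints) {
      if (candidates.isEmpty()) {
        break; // mirrors the !isEmpty() guard in the diff
      }
      candidates = candidates.stream()
        .filter(e -> constraint.test(e, task))
        .collect(Collectors.toSet());
    }
    // Stand-in for schedulingPolicy.selectExecutor(): pick any remaining candidate.
    return candidates.stream().findFirst();
  }
}
```
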
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/AllMetricServlet.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/AllMetricServlet.java
index e09c579..25d15d2 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/AllMetricServlet.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/AllMetricServlet.java
@@ -32,7 +32,7 @@
 
   @Override
   protected void doGet(final HttpServletRequest request, final HttpServletResponse response)
-      throws IOException {
+    throws IOException {
     final MetricStore metricStore = MetricStore.getStore();
     response.setContentType("application/json");
     response.setStatus(HttpServletResponse.SC_OK);
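
The four metric servlets touched below all share this `doGet` shape: fetch the store, mark the response as JSON, set the status, and write the serialized metrics. A minimal sketch of that shape against the `javax.servlet` API the diff already imports; the `"{}"` body is a placeholder, not Nemo's actual serialization:

```java
import java.io.IOException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

public final class ExampleMetricServlet extends HttpServlet {
  @Override
  protected void doGet(final HttpServletRequest request, final HttpServletResponse response)
    throws IOException {
    response.setContentType("application/json");
    response.setStatus(HttpServletResponse.SC_OK);
    response.getWriter().println("{}"); // placeholder for the dumped metric JSON
  }
}
```
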
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/JobMetricServlet.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/JobMetricServlet.java
index d965489..93cc9bb 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/JobMetricServlet.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/JobMetricServlet.java
@@ -18,8 +18,8 @@
  */
 package org.apache.nemo.runtime.master.servlet;
 
-import org.apache.nemo.runtime.master.metric.MetricStore;
 import org.apache.nemo.runtime.common.metric.JobMetric;
+import org.apache.nemo.runtime.master.metric.MetricStore;
 
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
@@ -33,7 +33,7 @@
 
   @Override
   protected void doGet(final HttpServletRequest request, final HttpServletResponse response)
-          throws IOException {
+    throws IOException {
     final MetricStore metricStore = MetricStore.getStore();
     response.setContentType("application/json");
     response.setStatus(HttpServletResponse.SC_OK);
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/StageMetricServlet.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/StageMetricServlet.java
index 5d77d46..74cb006 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/StageMetricServlet.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/StageMetricServlet.java
@@ -18,8 +18,8 @@
  */
 package org.apache.nemo.runtime.master.servlet;
 
-import org.apache.nemo.runtime.master.metric.MetricStore;
 import org.apache.nemo.runtime.common.metric.StageMetric;
+import org.apache.nemo.runtime.master.metric.MetricStore;
 
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
@@ -33,7 +33,7 @@
 
   @Override
   protected void doGet(final HttpServletRequest request, final HttpServletResponse response)
-      throws IOException {
+    throws IOException {
     final MetricStore metricStore = MetricStore.getStore();
     response.setContentType("application/json");
     response.setStatus(HttpServletResponse.SC_OK);
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/TaskMetricServlet.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/TaskMetricServlet.java
index f80033c..cd2ff57 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/TaskMetricServlet.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/TaskMetricServlet.java
@@ -18,8 +18,8 @@
  */
 package org.apache.nemo.runtime.master.servlet;
 
-import org.apache.nemo.runtime.master.metric.MetricStore;
 import org.apache.nemo.runtime.common.metric.TaskMetric;
+import org.apache.nemo.runtime.master.metric.MetricStore;
 
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
@@ -33,7 +33,7 @@
 
   @Override
   protected void doGet(final HttpServletRequest request, final HttpServletResponse response)
-      throws IOException {
+    throws IOException {
     final MetricStore metricStore = MetricStore.getStore();
     response.setContentType("application/json");
     response.setStatus(HttpServletResponse.SC_OK);
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/WebSocketMetricAdapter.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/WebSocketMetricAdapter.java
index 62821c6..939a8e1 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/WebSocketMetricAdapter.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/servlet/WebSocketMetricAdapter.java
@@ -22,12 +22,11 @@
 import org.eclipse.jetty.websocket.api.Session;
 import org.eclipse.jetty.websocket.api.StatusCode;
 import org.eclipse.jetty.websocket.api.WebSocketAdapter;
-
-import java.io.IOException;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+
 /**
  * Adapter for WebSocket metric request and response.
  */
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/BlockManagerMasterTest.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/BlockManagerMasterTest.java
index c726945..4fa68c8 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/BlockManagerMasterTest.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/BlockManagerMasterTest.java
@@ -33,9 +33,8 @@
 import java.util.List;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+
+import static org.junit.Assert.*;
 
 /**
  * Test for {@link BlockManagerMaster}.
@@ -48,21 +47,21 @@
   @Before
   public void setUp() throws Exception {
     final Injector injector = LocalMessageEnvironment.forkInjector(LocalMessageDispatcher.getInjector(),
-        MessageEnvironment.MASTER_COMMUNICATION_ID);
+      MessageEnvironment.MASTER_COMMUNICATION_ID);
     blockManagerMaster = injector.getInstance(BlockManagerMaster.class);
   }
 
   private static void checkInProgressToNotAvailableException(final Future<String> future,
                                                              final String expectedPartitionId,
                                                              final BlockState.State expectedState)
-      throws IllegalStateException, InterruptedException {
+    throws IllegalStateException, InterruptedException {
     assertTrue(future.isDone());
     try {
       future.get();
       throw new IllegalStateException("An ExecutionException was expected.");
     } catch (final ExecutionException executionException) {
       final AbsentBlockException absentBlockException
-          = (AbsentBlockException) executionException.getCause();
+        = (AbsentBlockException) executionException.getCause();
       assertEquals(expectedPartitionId, absentBlockException.getBlockId());
       assertEquals(expectedState, absentBlockException.getState());
     }
@@ -70,7 +69,7 @@
 
   private static void checkBlockLocation(final Future<String> future,
                                          final String expectedLocation)
-      throws InterruptedException, ExecutionException {
+    throws InterruptedException, ExecutionException {
     assertTrue(future.isDone());
     assertEquals(expectedLocation, future.get()); // must not throw any exception.
   }
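
The `checkInProgressToNotAvailableException` helper above illustrates a common test idiom: a completed `Future` is expected to fail, and the real failure is unwrapped from the `ExecutionException` cause. A runnable sketch of just that idiom, independent of Nemo's block types:

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

final class FailedFutureCheck {
  static Throwable expectFailureCause(final Future<?> future) throws InterruptedException {
    if (!future.isDone()) {
      throw new IllegalStateException("Future should already be done.");
    }
    try {
      future.get();
      throw new IllegalStateException("An ExecutionException was expected.");
    } catch (final ExecutionException executionException) {
      return executionException.getCause(); // the real failure under test
    }
  }

  public static void main(final String[] args) throws InterruptedException {
    final CompletableFuture<String> failed = new CompletableFuture<>();
    failed.completeExceptionally(new RuntimeException("absent block"));
    System.out.println(expectFailureCause(failed).getMessage()); // prints: absent block
  }
}
```
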
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/ContainerManagerTest.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/ContainerManagerTest.java
index dc27880..d82ddd5 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/ContainerManagerTest.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/ContainerManagerTest.java
@@ -38,7 +38,7 @@
 
 import java.util.HashMap;
 import java.util.Map;
-import java.util.concurrent.*;
+import java.util.concurrent.Future;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import static org.junit.Assert.assertEquals;
@@ -51,11 +51,11 @@
  */
 public final class ContainerManagerTest {
   private static final ResourceSpecification RESOURCE_SPEC_A =
-      new ResourceSpecification(ResourcePriorityProperty.COMPUTE, 1, 1024);
+    new ResourceSpecification(ResourcePriorityProperty.COMPUTE, 1, 1024);
   private static final ResourceSpecification RESOURCE_SPEC_B =
-      new ResourceSpecification(ResourcePriorityProperty.TRANSIENT, 2, 2048);
+    new ResourceSpecification(ResourcePriorityProperty.TRANSIENT, 2, 2048);
   private static final ResourceSpecification RESOURCE_SPEC_C =
-      new ResourceSpecification(ResourcePriorityProperty.RESERVED, 3, 3072);
+    new ResourceSpecification(ResourcePriorityProperty.RESERVED, 3, 3072);
 
   private ContainerManager containerManager;
   private AtomicInteger testIdNumber = new AtomicInteger(0);
@@ -78,8 +78,8 @@
     final MessageEnvironment mockMsgEnv = mock(MessageEnvironment.class);
     when(mockMsgEnv.asyncConnect(anyString(), anyString())).thenReturn(mock(Future.class));
     final Configuration configuration = Tang.Factory.getTang().newConfigurationBuilder()
-        .bindNamedParameter(JobConf.ScheduleSerThread.class, "1")
-        .build();
+      .bindNamedParameter(JobConf.ScheduleSerThread.class, "1")
+      .build();
     final Injector injector = Tang.Factory.getTang().newInjector(configuration);
     injector.bindVolatileInstance(EvaluatorRequestor.class, mock(EvaluatorRequestor.class));
     injector.bindVolatileInstance(MessageEnvironment.class, mockMsgEnv);
@@ -106,11 +106,11 @@
         final EvaluatorDescriptor descriptor = createDescriptor(spec);
 
         containerManager.onContainerAllocated(
-            executorId,
-            createMockEvaluator(evaluatorId, descriptor),
-            createMockConfiguration());
+          executorId,
+          createMockEvaluator(evaluatorId, descriptor),
+          createMockConfiguration());
         final ExecutorRepresenter executorRepresenter =
-            containerManager.onContainerLaunched(createMockContext(executorId, descriptor)).get();
+          containerManager.onContainerLaunched(createMockContext(executorId, descriptor)).get();
         assertEquals(spec.getContainerType(), executorRepresenter.getContainerType());
         assertEquals(spec.getCapacity(), executorRepresenter.getExecutorCapacity());
         assertEquals(descriptor.getNodeDescriptor().getName(), executorRepresenter.getNodeName());
@@ -124,9 +124,9 @@
     final String evaluatorId = getEvaluatorId();
 
     containerManager.onContainerAllocated(
-        getExecutorId(),
-        createMockEvaluator(evaluatorId, createDescriptor(RESOURCE_SPEC_A)),
-        createMockConfiguration());
+      getExecutorId(),
+      createMockEvaluator(evaluatorId, createDescriptor(RESOURCE_SPEC_A)),
+      createMockConfiguration());
     assertEquals(RESOURCE_SPEC_A, containerManager.onContainerFailed(evaluatorId));
   }
 
@@ -138,9 +138,9 @@
     final EvaluatorDescriptor descriptor = createDescriptor(RESOURCE_SPEC_A);
 
     containerManager.onContainerAllocated(
-        executorId,
-        createMockEvaluator(evaluatorId, descriptor),
-        createMockConfiguration());
+      executorId,
+      createMockEvaluator(evaluatorId, descriptor),
+      createMockConfiguration());
     containerManager.onContainerLaunched(createMockContext(executorId, descriptor));
     assertEquals(RESOURCE_SPEC_A, containerManager.onContainerFailed(evaluatorId));
   }
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/PlanStateManagerTest.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/PlanStateManagerTest.java
index dedd2d9..5580108 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/PlanStateManagerTest.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/PlanStateManagerTest.java
@@ -25,10 +25,10 @@
 import org.apache.nemo.runtime.common.message.local.LocalMessageEnvironment;
 import org.apache.nemo.runtime.common.plan.PhysicalPlan;
 import org.apache.nemo.runtime.common.plan.Stage;
+import org.apache.nemo.runtime.common.plan.TestPlanGenerator;
 import org.apache.nemo.runtime.common.state.PlanState;
 import org.apache.nemo.runtime.common.state.StageState;
 import org.apache.nemo.runtime.common.state.TaskState;
-import org.apache.nemo.runtime.common.plan.TestPlanGenerator;
 import org.apache.nemo.runtime.master.metric.MetricMessageHandler;
 import org.apache.reef.tang.Injector;
 import org.junit.Before;
@@ -37,7 +37,7 @@
 import org.powermock.modules.junit4.PowerMockRunner;
 
 import java.util.List;
-import java.util.concurrent.*;
+import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 
 import static org.junit.Assert.assertEquals;
@@ -56,7 +56,7 @@
   @Before
   public void setUp() throws Exception {
     final Injector injector = LocalMessageEnvironment.forkInjector(LocalMessageDispatcher.getInjector(),
-        MessageEnvironment.MASTER_COMMUNICATION_ID);
+      MessageEnvironment.MASTER_COMMUNICATION_ID);
     metricMessageHandler = mock(MetricMessageHandler.class);
     injector.bindVolatileInstance(MetricMessageHandler.class, metricMessageHandler);
     injector.bindVolatileParameter(JobConf.DAGDirectory.class, "");
@@ -70,7 +70,7 @@
   @Test
   public void testPhysicalPlanStateChanges() throws Exception {
     final PhysicalPlan physicalPlan =
-        TestPlanGenerator.generatePhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, false);
+      TestPlanGenerator.generatePhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, false);
     planStateManager.updatePlan(physicalPlan, MAX_SCHEDULE_ATTEMPT);
 
     assertEquals(planStateManager.getPlanId(), "TestPlan");
@@ -101,7 +101,7 @@
   @Test(timeout = 4000)
   public void testWaitUntilFinish() throws Exception {
     final PhysicalPlan physicalPlan =
-        TestPlanGenerator.generatePhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, false);
+      TestPlanGenerator.generatePhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, false);
     planStateManager.updatePlan(physicalPlan, MAX_SCHEDULE_ATTEMPT);
 
     assertFalse(planStateManager.isPlanDone());
@@ -114,8 +114,8 @@
     // Complete the plan and check the result again.
     // It has to return COMPLETE.
     final List<String> tasks = physicalPlan.getStageDAG().getTopologicalSort().stream()
-        .flatMap(stage -> planStateManager.getTaskAttemptsToSchedule(stage.getId()).stream())
-        .collect(Collectors.toList());
+      .flatMap(stage -> planStateManager.getTaskAttemptsToSchedule(stage.getId()).stream())
+      .collect(Collectors.toList());
     tasks.forEach(taskId -> planStateManager.onTaskStateChanged(taskId, TaskState.State.EXECUTING));
     tasks.forEach(taskId -> planStateManager.onTaskStateChanged(taskId, TaskState.State.COMPLETE));
     final PlanState.State completedState = planStateManager.waitUntilFinish();
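
The test above drives every task through EXECUTING and COMPLETE and then blocks on `waitUntilFinish()`. A tiny sketch of that wait-until-finish contract, with a `CountDownLatch` standing in for PlanStateManager's internal state bookkeeping:

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

final class FinishWaiter {
  private final CountDownLatch remainingTasks;

  FinishWaiter(final int numTasks) {
    this.remainingTasks = new CountDownLatch(numTasks);
  }

  void onTaskComplete() {
    remainingTasks.countDown();
  }

  // Returns true if all tasks completed within the timeout.
  boolean waitUntilFinish(final long timeout, final TimeUnit unit) throws InterruptedException {
    return remainingTasks.await(timeout, unit);
  }
}
```
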
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/AntiAffinitySchedulingConstraintTest.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/AntiAffinitySchedulingConstraintTest.java
index f8e9a39..3487b4e 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/AntiAffinitySchedulingConstraintTest.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/AntiAffinitySchedulingConstraintTest.java
@@ -30,7 +30,9 @@
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
 
-import java.util.*;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
 
 import static org.junit.Assert.assertEquals;
 import static org.mockito.Mockito.mock;
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/BatchSchedulerTest.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/BatchSchedulerTest.java
index 1092954..c980840 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/BatchSchedulerTest.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/BatchSchedulerTest.java
@@ -18,18 +18,18 @@
  */
 package org.apache.nemo.runtime.master.scheduler;
 
+import org.apache.nemo.common.dag.DAG;
 import org.apache.nemo.common.eventhandler.PubSubEventHandlerWrapper;
 import org.apache.nemo.common.ir.vertex.executionproperty.ResourcePriorityProperty;
 import org.apache.nemo.conf.JobConf;
 import org.apache.nemo.runtime.common.comm.ControlMessage;
 import org.apache.nemo.runtime.common.message.MessageSender;
 import org.apache.nemo.runtime.common.plan.*;
+import org.apache.nemo.runtime.master.BlockManagerMaster;
 import org.apache.nemo.runtime.master.PlanStateManager;
 import org.apache.nemo.runtime.master.metric.MetricMessageHandler;
-import org.apache.nemo.runtime.master.BlockManagerMaster;
 import org.apache.nemo.runtime.master.resource.ExecutorRepresenter;
 import org.apache.nemo.runtime.master.resource.ResourceSpecification;
-import org.apache.nemo.common.dag.DAG;
 import org.apache.reef.driver.context.ActiveContext;
 import org.apache.reef.tang.Injector;
 import org.apache.reef.tang.Tang;
@@ -43,7 +43,10 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.function.Function;
@@ -87,19 +90,19 @@
 
     final ExecutorService serializationExecutorService = Executors.newSingleThreadExecutor();
     final ResourceSpecification computeSpec =
-        new ResourceSpecification(ResourcePriorityProperty.COMPUTE, EXECUTOR_CAPACITY, 0);
+      new ResourceSpecification(ResourcePriorityProperty.COMPUTE, EXECUTOR_CAPACITY, 0);
     final Function<String, ExecutorRepresenter> computeSpecExecutorRepresenterGenerator = executorId ->
-        new ExecutorRepresenter(executorId, computeSpec, mockMsgSender, activeContext, serializationExecutorService,
-            executorId);
+      new ExecutorRepresenter(executorId, computeSpec, mockMsgSender, activeContext, serializationExecutorService,
+        executorId);
     final ExecutorRepresenter a3 = computeSpecExecutorRepresenterGenerator.apply("a3");
     final ExecutorRepresenter a2 = computeSpecExecutorRepresenterGenerator.apply("a2");
     final ExecutorRepresenter a1 = computeSpecExecutorRepresenterGenerator.apply("a1");
 
     final ResourceSpecification storageSpec =
-        new ResourceSpecification(ResourcePriorityProperty.TRANSIENT, EXECUTOR_CAPACITY, 0);
+      new ResourceSpecification(ResourcePriorityProperty.TRANSIENT, EXECUTOR_CAPACITY, 0);
     final Function<String, ExecutorRepresenter> storageSpecExecutorRepresenterGenerator = executorId ->
-        new ExecutorRepresenter(executorId, storageSpec, mockMsgSender, activeContext, serializationExecutorService,
-            executorId);
+      new ExecutorRepresenter(executorId, storageSpec, mockMsgSender, activeContext, serializationExecutorService,
+        executorId);
     final ExecutorRepresenter b2 = storageSpecExecutorRepresenterGenerator.apply("b2");
     final ExecutorRepresenter b1 = storageSpecExecutorRepresenterGenerator.apply("b1");
 
@@ -117,20 +120,20 @@
    * This method builds a physical DAG starting from an IR DAG and submits it to {@link BatchScheduler}.
    * Task state changes are explicitly submitted to scheduler instead of executor messages.
    */
-  @Test(timeout=10000)
+  @Test(timeout = 10000)
   public void testPull() throws Exception {
     scheduleAndCheckPlanTermination(
-        TestPlanGenerator.generatePhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, false));
+      TestPlanGenerator.generatePhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, false));
   }
 
   /**
    * This method builds a physical DAG starting from an IR DAG and submits it to {@link BatchScheduler}.
    * Task state changes are explicitly submitted to scheduler instead of executor messages.
    */
-  @Test(timeout=10000)
+  @Test(timeout = 10000)
   public void testPush() throws Exception {
     scheduleAndCheckPlanTermination(
-        TestPlanGenerator.generatePhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, true));
+      TestPlanGenerator.generatePhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, true));
   }
 
   private void scheduleAndCheckPlanTermination(final PhysicalPlan plan) throws InjectionException {
@@ -145,7 +148,7 @@
       LOG.debug("Checking that all stages of ScheduleGroup {} enter the executing state", scheduleGroupIdx);
       stages.forEach(stage -> {
         SchedulerTestUtil.completeStage(
-            planStateManager, scheduler, executorRegistry, stage, SCHEDULE_ATTEMPT_INDEX);
+          planStateManager, scheduler, executorRegistry, stage, SCHEDULE_ATTEMPT_INDEX);
       });
     }
 
@@ -156,9 +159,9 @@
   }
 
   private List<Stage> filterStagesWithAScheduleGroup(
-      final DAG<Stage, StageEdge> physicalDAG, final int scheduleGroup) {
+    final DAG<Stage, StageEdge> physicalDAG, final int scheduleGroup) {
     final Set<Stage> stageSet = new HashSet<>(physicalDAG.filterVertices(
-        stage -> stage.getScheduleGroup() == scheduleGroup));
+      stage -> stage.getScheduleGroup() == scheduleGroup));
 
     // Return the filtered vertices as a sorted list
     final List<Stage> sortedStages = new ArrayList<>(stageSet.size());
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/ContainerTypeAwareSchedulingConstraintTest.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/ContainerTypeAwareSchedulingConstraintTest.java
index ff005be..8ca378d 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/ContainerTypeAwareSchedulingConstraintTest.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/ContainerTypeAwareSchedulingConstraintTest.java
@@ -32,7 +32,8 @@
 import java.util.stream.Collectors;
 
 import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 /**
  * Tests {@link ContainerTypeAwareSchedulingConstraint}.
@@ -50,33 +51,34 @@
   @Test
   public void testContainerTypeAware() throws InjectionException {
     final SchedulingConstraint schedulingConstraint = Tang.Factory.getTang().newInjector()
-        .getInstance(ContainerTypeAwareSchedulingConstraint.class);
+      .getInstance(ContainerTypeAwareSchedulingConstraint.class);
     final ExecutorRepresenter a0 = mockExecutorRepresenter(ResourcePriorityProperty.TRANSIENT);
     final ExecutorRepresenter a1 = mockExecutorRepresenter(ResourcePriorityProperty.RESERVED);
     final ExecutorRepresenter a2 = mockExecutorRepresenter(ResourcePriorityProperty.NONE);
 
     final Task task1 = mock(Task.class);
     when(task1.getPropertyValue(ResourcePriorityProperty.class))
-        .thenReturn(Optional.of(ResourcePriorityProperty.RESERVED));
+      .thenReturn(Optional.of(ResourcePriorityProperty.RESERVED));
 
     final Set<ExecutorRepresenter> executorRepresenterList1 = new HashSet<>(Arrays.asList(a0, a1, a2));
 
     final Set<ExecutorRepresenter> candidateExecutors1 = executorRepresenterList1.stream()
-        .filter(e -> schedulingConstraint.testSchedulability(e, task1))
-        .collect(Collectors.toSet());;
+      .filter(e -> schedulingConstraint.testSchedulability(e, task1))
+      .collect(Collectors.toSet());
 
     final Set<ExecutorRepresenter> expectedExecutors1 = Collections.singleton(a1);
     assertEquals(expectedExecutors1, candidateExecutors1);
 
     final Task task2 = mock(Task.class);
     when(task2.getPropertyValue(ResourcePriorityProperty.class))
-        .thenReturn(Optional.of(ResourcePriorityProperty.NONE));
+      .thenReturn(Optional.of(ResourcePriorityProperty.NONE));
 
     final Set<ExecutorRepresenter> executorRepresenterList2 = new HashSet<>(Arrays.asList(a0, a1, a2));
 
     final Set<ExecutorRepresenter> candidateExecutors2 = executorRepresenterList2.stream()
-        .filter(e -> schedulingConstraint.testSchedulability(e, task2))
-        .collect(Collectors.toSet());
+      .filter(e -> schedulingConstraint.testSchedulability(e, task2))
+      .collect(Collectors.toSet());
 
     final Set<ExecutorRepresenter> expectedExecutors2 = new HashSet<>(Arrays.asList(a0, a1, a2));
     assertEquals(expectedExecutors2, candidateExecutors2);
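
The wildcard `Mockito.*` import is expanded here because only `mock` and `when` are used, presumably below IntelliJ's name-count threshold for collapsing static imports (a few files later in this diff go the opposite way for the same reason). A minimal, hedged sketch of the mock-then-filter pattern these constraint tests share; `executors` and `schedulingConstraint` are assumed stand-ins for the fixtures above:

```java
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

// Stub a Task's resource priority, then keep only the executors that the
// constraint deems schedulable (executors/schedulingConstraint assumed).
final Task task = mock(Task.class);
when(task.getPropertyValue(ResourcePriorityProperty.class))
  .thenReturn(Optional.of(ResourcePriorityProperty.RESERVED));

final Set<ExecutorRepresenter> candidates = executors.stream()
  .filter(e -> schedulingConstraint.testSchedulability(e, task))
  .collect(Collectors.toSet());
```
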
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/FreeSlotSchedulingConstraintTest.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/FreeSlotSchedulingConstraintTest.java
index b362edf..f6793a9 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/FreeSlotSchedulingConstraintTest.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/FreeSlotSchedulingConstraintTest.java
@@ -32,7 +32,8 @@
 import java.util.stream.Collectors;
 
 import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 /**
  * Tests {@link FreeSlotSchedulingConstraint}.
@@ -78,8 +79,8 @@
     final Set<ExecutorRepresenter> executorRepresenterList = new HashSet<>(Arrays.asList(a0, a1));
 
     final Set<ExecutorRepresenter> candidateExecutors = executorRepresenterList.stream()
-        .filter(e -> schedulingConstraint.testSchedulability(e, task))
-        .collect(Collectors.toSet());
+      .filter(e -> schedulingConstraint.testSchedulability(e, task))
+      .collect(Collectors.toSet());
 
     final Set<ExecutorRepresenter> expectedExecutors = Collections.singleton(a1);
     assertEquals(expectedExecutors, candidateExecutors);
@@ -97,8 +98,8 @@
     final Set<ExecutorRepresenter> executorRepresenterList = new HashSet<>(Arrays.asList(a0, a1));
 
     final Set<ExecutorRepresenter> candidateExecutors = executorRepresenterList.stream()
-        .filter(e -> schedulingConstraint.testSchedulability(e, task))
-        .collect(Collectors.toSet());
+      .filter(e -> schedulingConstraint.testSchedulability(e, task))
+      .collect(Collectors.toSet());
 
     final Set<ExecutorRepresenter> expectedExecutors = new HashSet<>(Arrays.asList(a0, a1));
     assertEquals(expectedExecutors, candidateExecutors);
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/LocalitySchedulingConstraintTest.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/LocalitySchedulingConstraintTest.java
index add63de..d096315 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/LocalitySchedulingConstraintTest.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/LocalitySchedulingConstraintTest.java
@@ -18,9 +18,9 @@
  */
 package org.apache.nemo.runtime.master.scheduler;
 
+import org.apache.nemo.common.ir.Readable;
 import org.apache.nemo.common.ir.vertex.executionproperty.ResourceLocalityProperty;
 import org.apache.nemo.runtime.common.plan.Task;
-import org.apache.nemo.common.ir.Readable;
 import org.apache.nemo.runtime.master.BlockManagerMaster;
 import org.apache.nemo.runtime.master.resource.ExecutorRepresenter;
 import org.apache.reef.tang.Injector;
@@ -38,7 +38,8 @@
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 /**
  * Test cases for {@link LocalitySchedulingConstraint}.
@@ -70,17 +71,17 @@
   @Test
   public void testSourceLocationAwareSchedulingNotAvailable() throws InjectionException {
     final SchedulingConstraint schedulingConstraint = injector
-        .getInstance(LocalitySchedulingConstraint.class);
+      .getInstance(LocalitySchedulingConstraint.class);
 
     // Prepare test scenario
     final Task task = CreateTask.withReadablesWithSourceLocations(
-        Collections.singletonList(Collections.singletonList(SITE_0)));
+      Collections.singletonList(Collections.singletonList(SITE_0)));
     final ExecutorRepresenter e0 = mockExecutorRepresenter(SITE_1);
     final ExecutorRepresenter e1 = mockExecutorRepresenter(SITE_1);
 
     assertEquals(Collections.emptySet(), Arrays.asList(e0, e1).stream()
-        .filter(e -> schedulingConstraint.testSchedulability(e, task))
-        .collect(Collectors.toSet()));
+      .filter(e -> schedulingConstraint.testSchedulability(e, task))
+      .collect(Collectors.toSet()));
   }
 
   /**
@@ -90,18 +91,18 @@
   @Test
   public void testSourceLocationAwareSchedulingWithMultiSource() throws InjectionException {
     final SchedulingConstraint schedulingConstraint = injector
-        .getInstance(LocalitySchedulingConstraint.class);
+      .getInstance(LocalitySchedulingConstraint.class);
     // Prepare test scenario
     final Task task0 = CreateTask.withReadablesWithSourceLocations(
-        Collections.singletonList(Collections.singletonList(SITE_1)));
+      Collections.singletonList(Collections.singletonList(SITE_1)));
     final Task task1 = CreateTask.withReadablesWithSourceLocations(
-        Collections.singletonList(Arrays.asList(SITE_0, SITE_1, SITE_2)));
+      Collections.singletonList(Arrays.asList(SITE_0, SITE_1, SITE_2)));
     final Task task2 = CreateTask.withReadablesWithSourceLocations(
-        Arrays.asList(Collections.singletonList(SITE_0), Collections.singletonList(SITE_1),
-            Arrays.asList(SITE_1, SITE_2)));
+      Arrays.asList(Collections.singletonList(SITE_0), Collections.singletonList(SITE_1),
+        Arrays.asList(SITE_1, SITE_2)));
     final Task task3 = CreateTask.withReadablesWithSourceLocations(
-        Arrays.asList(Collections.singletonList(SITE_1), Collections.singletonList(SITE_0),
-            Arrays.asList(SITE_0, SITE_2)));
+      Arrays.asList(Collections.singletonList(SITE_1), Collections.singletonList(SITE_0),
+        Arrays.asList(SITE_0, SITE_2)));
 
     final ExecutorRepresenter e = mockExecutorRepresenter(SITE_1);
     for (final Task task : new HashSet<>(Arrays.asList(task0, task1, task2, task3))) {
@@ -121,7 +122,7 @@
       final Task mockInstance = mock(Task.class);
       final Map<String, Readable> readableMap = new HashMap<>();
       readables.forEach(readable -> readableMap.put(String.format("TASK-%d", intraTaskIndex.getAndIncrement()),
-          readable));
+        readable));
       when(mockInstance.getTaskId()).thenReturn(String.format("T-%d", taskIndex.getAndIncrement()));
       when(mockInstance.getIrVertexIdToReadable()).thenReturn(readableMap);
       when(mockInstance.getPropertyValue(ResourceLocalityProperty.class)).thenReturn(Optional.of(true));
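
The `CreateTask` helper above builds mock tasks whose readables advertise candidate source locations; each inner list holds the locations of one readable. A hedged usage sketch based on the call sites in the hunks above, with `SITE_0` through `SITE_2` being the test's site constants:

```java
// One readable readable from SITE_0, one from SITE_1, and one from either
// SITE_1 or SITE_2 (mirrors task2 in the hunk above).
final Task multiSourceTask = CreateTask.withReadablesWithSourceLocations(
  Arrays.asList(Collections.singletonList(SITE_0), Collections.singletonList(SITE_1),
    Arrays.asList(SITE_1, SITE_2)));
```
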
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/MinOccupancyFirstSchedulingPolicyTest.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/MinOccupancyFirstSchedulingPolicyTest.java
index 3f6f049..e709655 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/MinOccupancyFirstSchedulingPolicyTest.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/MinOccupancyFirstSchedulingPolicyTest.java
@@ -27,11 +27,15 @@
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
 
-import java.util.*;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 import java.util.stream.IntStream;
 
 import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 /**
  * Tests {@link MinOccupancyFirstSchedulingPolicy}.
@@ -57,7 +61,7 @@
   @Test
   public void test() throws InjectionException {
     final SchedulingPolicy schedulingPolicy = Tang.Factory.getTang().newInjector()
-        .getInstance(MinOccupancyFirstSchedulingPolicy.class);
+      .getInstance(MinOccupancyFirstSchedulingPolicy.class);
     final ExecutorRepresenter a0 = mockExecutorRepresenter(1);
     final ExecutorRepresenter a1 = mockExecutorRepresenter(2);
     final ExecutorRepresenter a2 = mockExecutorRepresenter(2);
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/PendingTaskCollectionPointerTest.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/PendingTaskCollectionPointerTest.java
index 7521443..2440279 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/PendingTaskCollectionPointerTest.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/PendingTaskCollectionPointerTest.java
@@ -32,9 +32,7 @@
 import java.util.List;
 import java.util.Optional;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
 import static org.mockito.Mockito.mock;
 
 /**
@@ -53,7 +51,7 @@
   @Before
   public void setUp() throws InjectionException {
     this.pendingTaskCollectionPointer = Tang.Factory.getTang().newInjector()
-        .getInstance(PendingTaskCollectionPointer.class);
+      .getInstance(PendingTaskCollectionPointer.class);
   }
 
   @Test
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/SchedulerTestUtil.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/SchedulerTestUtil.java
index 87c3c53..94afbfd 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/SchedulerTestUtil.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/SchedulerTestUtil.java
@@ -32,10 +32,11 @@
 final class SchedulerTestUtil {
   /**
    * Complete the stage by completing all of its Tasks.
+   *
    * @param planStateManager for the submitted plan.
-   * @param scheduler for the submitted plan.
+   * @param scheduler        for the submitted plan.
    * @param executorRegistry provides executor representers
-   * @param stage for which the states should be marked as complete.
+   * @param stage            for which the states should be marked as complete.
    */
   static void completeStage(final PlanStateManager planStateManager,
                             final Scheduler scheduler,
@@ -53,7 +54,7 @@
           final TaskState.State taskState = planStateManager.getTaskState(taskId);
           if (TaskState.State.EXECUTING == taskState) {
             sendTaskStateEventToScheduler(scheduler, executorRegistry, taskId,
-                TaskState.State.COMPLETE, attemptIdx, null);
+              TaskState.State.COMPLETE, attemptIdx, null);
           } else if (TaskState.State.READY == taskState || TaskState.State.COMPLETE == taskState) {
             // Skip READY (try in the next loop and see if it becomes EXECUTING) and COMPLETE.
           } else {
@@ -69,11 +70,12 @@
   /**
    * Sends a task state change event to the scheduler.
    * This replaces the executor's task completion messages for testing purposes.
-   * @param scheduler for the submitted plan.
+   *
+   * @param scheduler        for the submitted plan.
    * @param executorRegistry provides executor representers
-   * @param taskId for the task to change the state.
-   * @param newState for the task.
-   * @param cause in the case of a recoverable failure.
+   * @param taskId           for the task to change the state.
+   * @param newState         for the task.
+   * @param cause            in the case of a recoverable failure.
    */
   static void sendTaskStateEventToScheduler(final Scheduler scheduler,
                                             final ExecutorRegistry executorRegistry,
@@ -90,7 +92,7 @@
       }
     }
     scheduler.onTaskStateReportFromExecutor(scheduledExecutor.getExecutorId(), taskId, attemptIdx,
-        newState, null, cause);
+      newState, null, cause);
   }
 
   static void sendTaskStateEventToScheduler(final Scheduler scheduler,
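
The realigned Javadoc above documents this utility's two entry points. A hedged usage sketch matching those signatures, with the fixture objects assumed from a scheduler test such as the ones in this diff:

```java
// Drive one task to COMPLETE without a real executor round-trip.
SchedulerTestUtil.sendTaskStateEventToScheduler(scheduler, executorRegistry, taskId,
  TaskState.State.COMPLETE, attemptIdx, null);

// Or mark every task of a stage as complete in a single call.
SchedulerTestUtil.completeStage(
  planStateManager, scheduler, executorRegistry, stage, SCHEDULE_ATTEMPT_INDEX);
```
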
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/SchedulingConstraintnRegistryTest.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/SchedulingConstraintnRegistryTest.java
index b022872..aae47da 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/SchedulingConstraintnRegistryTest.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/SchedulingConstraintnRegistryTest.java
@@ -19,9 +19,9 @@
 package org.apache.nemo.runtime.master.scheduler;
 
 import org.apache.nemo.common.ir.executionproperty.VertexExecutionProperty;
+import org.apache.nemo.common.ir.vertex.executionproperty.ResourceLocalityProperty;
 import org.apache.nemo.common.ir.vertex.executionproperty.ResourcePriorityProperty;
 import org.apache.nemo.common.ir.vertex.executionproperty.ResourceSlotProperty;
-import org.apache.nemo.common.ir.vertex.executionproperty.ResourceLocalityProperty;
 import org.apache.nemo.runtime.master.BlockManagerMaster;
 import org.apache.reef.tang.Injector;
 import org.apache.reef.tang.Tang;
@@ -45,19 +45,19 @@
     final Injector injector = Tang.Factory.getTang().newInjector();
     injector.bindVolatileInstance(BlockManagerMaster.class, mock(BlockManagerMaster.class));
     final SchedulingConstraintRegistry registry =
-        injector.getInstance(SchedulingConstraintRegistry.class);
+      injector.getInstance(SchedulingConstraintRegistry.class);
     assertEquals(FreeSlotSchedulingConstraint.class, getConstraintOf(ResourceSlotProperty.class, registry));
     assertEquals(ContainerTypeAwareSchedulingConstraint.class,
-        getConstraintOf(ResourcePriorityProperty.class, registry));
+      getConstraintOf(ResourcePriorityProperty.class, registry));
     assertEquals(LocalitySchedulingConstraint.class,
-        getConstraintOf(ResourceLocalityProperty.class, registry));
+      getConstraintOf(ResourceLocalityProperty.class, registry));
   }
 
   private static Class<? extends SchedulingConstraint> getConstraintOf(
-      final Class<? extends VertexExecutionProperty> property, final SchedulingConstraintRegistry registry) {
+    final Class<? extends VertexExecutionProperty> property, final SchedulingConstraintRegistry registry) {
     return registry.get(property)
-        .orElseThrow(() -> new RuntimeException(String.format(
-            "No SchedulingConstraint found for property %s", property)))
-        .getClass();
+      .orElseThrow(() -> new RuntimeException(String.format(
+        "No SchedulingConstraint found for property %s", property)))
+      .getClass();
   }
 }
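
`getConstraintOf` above shows the registry's lookup contract: `registry.get(property)` returns an `Optional`, so an absent mapping surfaces as an exception rather than a null. A hedged sketch of the same lookup outside the test helper:

```java
// Resolve the constraint registered for ResourceSlotProperty, failing
// loudly if none is mapped (mirrors getConstraintOf in the hunk above).
final SchedulingConstraint constraint = registry.get(ResourceSlotProperty.class)
  .orElseThrow(() -> new RuntimeException(String.format(
    "No SchedulingConstraint found for property %s", ResourceSlotProperty.class)));
```
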
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/StreamingSchedulerTest.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/StreamingSchedulerTest.java
index 8a2f425..7c1c296 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/StreamingSchedulerTest.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/StreamingSchedulerTest.java
@@ -37,9 +37,7 @@
 
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.argThat;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.*;
 
 /**
  * Tests {@link StreamingScheduler}.
@@ -78,7 +76,7 @@
       .collect(Collectors.toList());
   }
 
-  @Test(timeout=10000)
+  @Test(timeout = 10000)
   public void testScheduleEverything() throws Exception {
     final PhysicalPlan physicalPlan =
       TestPlanGenerator.generatePhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, false);
diff --git a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/TaskRetryTest.java b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/TaskRetryTest.java
index e2e078e..bedf7d6 100644
--- a/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/TaskRetryTest.java
+++ b/runtime/master/src/test/java/org/apache/nemo/runtime/master/scheduler/TaskRetryTest.java
@@ -17,6 +17,7 @@
  * under the License.
  */
 package org.apache.nemo.runtime.master.scheduler;
+
 import org.apache.nemo.common.eventhandler.PubSubEventHandlerWrapper;
 import org.apache.nemo.common.ir.vertex.executionproperty.ResourcePriorityProperty;
 import org.apache.nemo.runtime.common.RuntimeIdManager;
@@ -27,15 +28,15 @@
 import org.apache.nemo.runtime.common.message.local.LocalMessageEnvironment;
 import org.apache.nemo.runtime.common.plan.PhysicalPlan;
 import org.apache.nemo.runtime.common.plan.PlanRewriter;
+import org.apache.nemo.runtime.common.plan.TestPlanGenerator;
 import org.apache.nemo.runtime.common.state.BlockState;
 import org.apache.nemo.runtime.common.state.PlanState;
 import org.apache.nemo.runtime.common.state.TaskState;
 import org.apache.nemo.runtime.master.BlockManagerMaster;
-import org.apache.nemo.runtime.master.metric.MetricMessageHandler;
 import org.apache.nemo.runtime.master.PlanStateManager;
+import org.apache.nemo.runtime.master.metric.MetricMessageHandler;
 import org.apache.nemo.runtime.master.resource.ExecutorRepresenter;
 import org.apache.nemo.runtime.master.resource.ResourceSpecification;
-import org.apache.nemo.runtime.common.plan.TestPlanGenerator;
 import org.apache.reef.driver.context.ActiveContext;
 import org.apache.reef.tang.Injector;
 import org.junit.Before;
@@ -64,9 +65,10 @@
  */
 @RunWith(PowerMockRunner.class)
 @PrepareForTest({BlockManagerMaster.class, TaskDispatcher.class, SchedulingConstraintRegistry.class,
-    PubSubEventHandlerWrapper.class})
+  PubSubEventHandlerWrapper.class})
 public final class TaskRetryTest {
-  @Rule public TestName testName = new TestName();
+  @Rule
+  public TestName testName = new TestName();
 
   private static final Logger LOG = LoggerFactory.getLogger(TaskRetryTest.class.getName());
   private static final AtomicInteger ID_OFFSET = new AtomicInteger(1);
@@ -96,7 +98,7 @@
     runPhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, injector);
   }
 
-  @Test(timeout=60000)
+  @Test(timeout = 60000)
   public void testExecutorRemoved() throws Exception {
     // Until the plan finishes, events happen
     while (!planStateManager.isPlanDone()) {
@@ -122,7 +124,7 @@
     assertTrue(planStateManager.isPlanDone());
   }
 
-  @Test(timeout=60000)
+  @Test(timeout = 60000)
   public void testTaskOutputWriteFailure() throws Exception {
     // Three executors are used
     executorAdded(1.0);
@@ -163,7 +165,7 @@
     final ExecutorService serExecutorService = Executors.newSingleThreadExecutor();
     final ResourceSpecification computeSpec = new ResourceSpecification(ResourcePriorityProperty.COMPUTE, 2, 0);
     final ExecutorRepresenter executor = new ExecutorRepresenter("EXECUTOR" + ID_OFFSET.getAndIncrement(),
-        computeSpec, mockMsgSender, activeContext, serExecutorService, "NODE" + ID_OFFSET.getAndIncrement());
+      computeSpec, mockMsgSender, activeContext, serExecutorService, "NODE" + ID_OFFSET.getAndIncrement());
     scheduler.onExecutorAdded(executor);
   }
 
@@ -219,8 +221,8 @@
       final int randomIndex = random.nextInt(executingTasks.size());
       final String selectedTask = executingTasks.get(randomIndex);
       SchedulerTestUtil.sendTaskStateEventToScheduler(scheduler, executorRegistry, selectedTask,
-          TaskState.State.SHOULD_RETRY, RuntimeIdManager.getAttemptFromTaskId(selectedTask),
-          TaskState.RecoverableTaskFailureCause.OUTPUT_WRITE_FAILURE);
+        TaskState.State.SHOULD_RETRY, RuntimeIdManager.getAttemptFromTaskId(selectedTask),
+        TaskState.RecoverableTaskFailureCause.OUTPUT_WRITE_FAILURE);
     }
   }
 
@@ -228,11 +230,11 @@
 
   private List<String> getTasksInState(final PlanStateManager planStateManager, final TaskState.State state) {
     return planStateManager.getAllTaskAttemptIdsToItsState()
-        .entrySet()
-        .stream()
-        .filter(entry -> entry.getValue().equals(state))
-        .map(Map.Entry::getKey)
-        .collect(Collectors.toList());
+      .entrySet()
+      .stream()
+      .filter(entry -> entry.getValue().equals(state))
+      .map(Map.Entry::getKey)
+      .collect(Collectors.toList());
   }
 
   private void runPhysicalPlan(final TestPlanGenerator.PlanType planType,
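
The failure-injection step in this test picks a random EXECUTING task and reports it back as SHOULD_RETRY with a recoverable cause, exercising the scheduler's retry path. A hedged sketch of that step, reassembled from the hunks above; `random` and the fixture fields are assumed from the test class:

```java
// Inject a recoverable output-write failure into one executing task.
final List<String> executingTasks = getTasksInState(planStateManager, TaskState.State.EXECUTING);
if (!executingTasks.isEmpty()) {
  final String selectedTask = executingTasks.get(random.nextInt(executingTasks.size()));
  SchedulerTestUtil.sendTaskStateEventToScheduler(scheduler, executorRegistry, selectedTask,
    TaskState.State.SHOULD_RETRY, RuntimeIdManager.getAttemptFromTaskId(selectedTask),
    TaskState.RecoverableTaskFailureCause.OUTPUT_WRITE_FAILURE);
}
```
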
diff --git a/runtime/pom.xml b/runtime/pom.xml
index 415cea6..b6cd38e 100644
--- a/runtime/pom.xml
+++ b/runtime/pom.xml
@@ -17,7 +17,8 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <parent>
     <artifactId>nemo-project</artifactId>
     <groupId>org.apache.nemo</groupId>
diff --git a/runtime/test/pom.xml b/runtime/test/pom.xml
index 187aa51..012b5cc 100644
--- a/runtime/test/pom.xml
+++ b/runtime/test/pom.xml
@@ -17,29 +17,30 @@
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
 
-    <parent>
-        <groupId>org.apache.nemo</groupId>
-        <artifactId>nemo-runtime</artifactId>
-        <version>0.2-SNAPSHOT</version>
-        <relativePath>../</relativePath>
-    </parent>
+  <parent>
+    <groupId>org.apache.nemo</groupId>
+    <artifactId>nemo-runtime</artifactId>
+    <version>0.2-SNAPSHOT</version>
+    <relativePath>../</relativePath>
+  </parent>
 
-    <artifactId>nemo-runtime-test</artifactId>
-    <name>Nemo Runtime Test</name>
+  <artifactId>nemo-runtime-test</artifactId>
+  <name>Nemo Runtime Test</name>
 
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-runtime-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nemo</groupId>
-            <artifactId>nemo-compiler-optimizer</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-    </dependencies>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-runtime-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.nemo</groupId>
+      <artifactId>nemo-compiler-optimizer</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
 </project>
diff --git a/runtime/test/src/main/java/org/apache/nemo/runtime/common/plan/TestPlanGenerator.java b/runtime/test/src/main/java/org/apache/nemo/runtime/common/plan/TestPlanGenerator.java
index 7d11ea6..453ffaf 100644
--- a/runtime/test/src/main/java/org/apache/nemo/runtime/common/plan/TestPlanGenerator.java
+++ b/runtime/test/src/main/java/org/apache/nemo/runtime/common/plan/TestPlanGenerator.java
@@ -71,7 +71,7 @@
 
   /**
    * @param planType type of the plan to generate.
-   * @param isPush whether to use the push policy.
+   * @param isPush   whether to use the push policy.
    * @return the generated plan.
    * @throws Exception exception.
    */
@@ -90,7 +90,7 @@
   }
 
   /**
-   * @param irDAG irDAG.
+   * @param irDAG  irDAG.
    * @param policy policy.
   * @return the physical plan converted from the IR DAG using the given policy.
    * @throws Exception exception.
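
For completeness, a hedged usage sketch of `generatePhysicalPlan` matching the Javadoc above and the call sites earlier in this diff; `isPush` selects the push policy, `false` the pull policy:

```java
// Generate the same two-vertex joined plan under both data-transfer policies.
final PhysicalPlan pullPlan =
  TestPlanGenerator.generatePhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, false);
final PhysicalPlan pushPlan =
  TestPlanGenerator.generatePhysicalPlan(TestPlanGenerator.PlanType.TwoVerticesJoined, true);
```
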