Joshfischer/fix website (#3225)
* adding correct gulp version
* fixing classpath error
* backtype package done
* fixing most javadoc warnings
* ignoring all errors of previous java docs
* correcting mktemp argument
* clean up
diff --git a/eco/src/java/org/apache/heron/eco/Eco.java b/eco/src/java/org/apache/heron/eco/Eco.java
index c5c2d45..617924d 100644
--- a/eco/src/java/org/apache/heron/eco/Eco.java
+++ b/eco/src/java/org/apache/heron/eco/Eco.java
@@ -65,7 +65,7 @@
* @param fileInputStream The input stream associated with ECO topology definition file
* @param propertiesFile The optional key-value property file for optional property substitution.
* @param envFilter The optional flag to tell ECO to perform environment variable substitution
- * @throws Exception
+ * @throws Exception the exception thrown
*/
public void submit(FileInputStream fileInputStream,
FileInputStream propertiesFile, boolean envFilter)
diff --git a/heron/api/src/java/org/apache/heron/streamlet/Builder.java b/heron/api/src/java/org/apache/heron/streamlet/Builder.java
index d6e4f70..f9a057c 100644
--- a/heron/api/src/java/org/apache/heron/streamlet/Builder.java
+++ b/heron/api/src/java/org/apache/heron/streamlet/Builder.java
@@ -43,7 +43,7 @@
* Creates a new Streamlet using the underlying generator
* @param generator The generator that generates the tuples of the streamlet
* @param <R>
- * @return
+ * @return the new streamlet
*/
<R> Streamlet<R> newSource(Source<R> generator);
@@ -51,7 +51,7 @@
* Creates a new Streamlet using the provided spout
* @param spout The spout that emits the tuples of the streamlet
* @param <R>
- * @return
+ * @return the new streamlet
*/
<R> Streamlet<R> newSource(IRichSpout spout);
}
diff --git a/heron/api/src/java/org/apache/heron/streamlet/impl/StreamletShadow.java b/heron/api/src/java/org/apache/heron/streamlet/impl/StreamletShadow.java
index 26e7eb6..633c4c6 100644
--- a/heron/api/src/java/org/apache/heron/streamlet/impl/StreamletShadow.java
+++ b/heron/api/src/java/org/apache/heron/streamlet/impl/StreamletShadow.java
@@ -44,7 +44,7 @@
* object(stream):
*
* StreamletImpl shadow = new StreamletShadow(stream) {
- * @Override
+ * {@literal @}Override
* public String getStreamId() {
* return "test";
* }
diff --git a/heron/metricsmgr/src/java/org/apache/heron/metricsmgr/MetricsUtil.java b/heron/metricsmgr/src/java/org/apache/heron/metricsmgr/MetricsUtil.java
index c8ec563..4fa76b9 100644
--- a/heron/metricsmgr/src/java/org/apache/heron/metricsmgr/MetricsUtil.java
+++ b/heron/metricsmgr/src/java/org/apache/heron/metricsmgr/MetricsUtil.java
@@ -36,7 +36,7 @@
* So splitting the source would be an array with 3 elements:
* ["host:port", componentName, instanceId]
* @param record
- * @return
+ * @return the split record source
*/
public static String[] splitRecordSource(MetricsRecord record) {
return record.getSource().split(SOURCE_DELIMITER);
diff --git a/heron/scheduler-core/src/java/org/apache/heron/scheduler/SchedulerMain.java b/heron/scheduler-core/src/java/org/apache/heron/scheduler/SchedulerMain.java
index 4301f1f..5c57e24 100644
--- a/heron/scheduler-core/src/java/org/apache/heron/scheduler/SchedulerMain.java
+++ b/heron/scheduler-core/src/java/org/apache/heron/scheduler/SchedulerMain.java
@@ -316,9 +316,9 @@
/**
* Get the http server for receiving scheduler requests
*
- * @param runtime, the runtime configuration
- * @param scheduler, an instance of the scheduler
- * @param port, the port for scheduler to listen on
+ * @param runtime the runtime configuration
+ * @param scheduler an instance of the scheduler
+ * @param port the port for scheduler to listen on
* @return an instance of the http server
*/
protected SchedulerServer getServer(
diff --git a/heron/scheduler-core/src/java/org/apache/heron/scheduler/SubmitterMain.java b/heron/scheduler-core/src/java/org/apache/heron/scheduler/SubmitterMain.java
index fbef628..6495505 100644
--- a/heron/scheduler-core/src/java/org/apache/heron/scheduler/SubmitterMain.java
+++ b/heron/scheduler-core/src/java/org/apache/heron/scheduler/SubmitterMain.java
@@ -66,11 +66,14 @@
/**
* Load the config parameters from the command line
*
- * @param cluster, name of the cluster
- * @param role, user role
- * @param environ, user provided environment/tag
- * @param verbose, enable verbose logging
- * @return config, the command line config
+ * @param cluster name of the cluster
+ * @param role user role
+ * @param environ user provided environment/tag
+ * @param submitUser the submit user
+ * @param dryRun run as dry run
+ * @param dryRunFormat the dry run format
+ * @param verbose enable verbose logging
+ * @return config the command line config
*/
protected static Config commandLineConfigs(String cluster,
String role,
diff --git a/heron/scheduler-core/src/java/org/apache/heron/scheduler/dryrun/DryRunRender.java b/heron/scheduler-core/src/java/org/apache/heron/scheduler/dryrun/DryRunRender.java
index 6091eec..042108e 100644
--- a/heron/scheduler-core/src/java/org/apache/heron/scheduler/dryrun/DryRunRender.java
+++ b/heron/scheduler-core/src/java/org/apache/heron/scheduler/dryrun/DryRunRender.java
@@ -31,7 +31,7 @@
/**
* Render dry-run response
- * @return
+ * @return the rendered dry-run response string
*/
String render();
}
diff --git a/heron/scheduler-core/src/java/org/apache/heron/scheduler/dryrun/FormatterUtils.java b/heron/scheduler-core/src/java/org/apache/heron/scheduler/dryrun/FormatterUtils.java
index 6781461..f71e661 100644
--- a/heron/scheduler-core/src/java/org/apache/heron/scheduler/dryrun/FormatterUtils.java
+++ b/heron/scheduler-core/src/java/org/apache/heron/scheduler/dryrun/FormatterUtils.java
@@ -148,7 +148,7 @@
/**
* Convert Cell to String
* @param rich if render in rich format
- * @return
+ * @return the cell string
*/
public String toString(boolean rich) {
StringBuilder builder = new StringBuilder();
diff --git a/heron/schedulers/src/java/org/apache/heron/scheduler/aurora/AuroraController.java b/heron/schedulers/src/java/org/apache/heron/scheduler/aurora/AuroraController.java
index ce15888..a703339 100644
--- a/heron/schedulers/src/java/org/apache/heron/scheduler/aurora/AuroraController.java
+++ b/heron/schedulers/src/java/org/apache/heron/scheduler/aurora/AuroraController.java
@@ -35,6 +35,7 @@
/**
* Restarts a given container, or the entire job if containerId is null
* @param containerId ID of container to restart, or entire job if null
+ * @return the boolean return value
*/
boolean restart(Integer containerId);
diff --git a/heron/spi/src/java/org/apache/heron/spi/scheduler/ILauncher.java b/heron/spi/src/java/org/apache/heron/spi/scheduler/ILauncher.java
index f40609b..5c655a9 100644
--- a/heron/spi/src/java/org/apache/heron/spi/scheduler/ILauncher.java
+++ b/heron/spi/src/java/org/apache/heron/spi/scheduler/ILauncher.java
@@ -35,6 +35,8 @@
* will be passed from submitter main. Config will contain information that launcher may use
* to setup scheduler and other parameters required by launcher to contact
* services which will launch scheduler.
+ * @param config the config
+ * @param runtime the runtime
*/
void initialize(Config config, Config runtime);
diff --git a/storm-compatibility/src/java/backtype/storm/task/GeneralTopologyContext.java b/storm-compatibility/src/java/backtype/storm/task/GeneralTopologyContext.java
index 3b39946..70458ee 100644
--- a/storm-compatibility/src/java/backtype/storm/task/GeneralTopologyContext.java
+++ b/storm-compatibility/src/java/backtype/storm/task/GeneralTopologyContext.java
@@ -82,6 +82,8 @@
/**
* Gets the set of streams declared for the specified component.
+ * @param componentId component id
+ * @return the set of streams
*/
public Set<String> getComponentStreams(String componentId) {
return delegate.getComponentStreams(componentId);
@@ -90,6 +92,8 @@
/**
* Gets the task ids allocated for the given component id. The task ids are
* always returned in ascending order.
+ * @param componentId the component id
+ * @return the task ids
*/
public List<Integer> getComponentTasks(String componentId) {
return delegate.getComponentTasks(componentId);
@@ -97,6 +101,9 @@
/**
* Gets the declared output fields for the specified component/stream.
+ * @param componentId the component id
+ * @param streamId the stream id
+ * @return the declared output fields
*/
public Fields getComponentOutputFields(String componentId, String streamId) {
return new Fields(delegate.getComponentOutputFields(componentId, streamId));
@@ -152,6 +159,7 @@
/**
* Gets a map from task id to component id.
+ * @return a map from task id to component id
*/
public Map<Integer, String> getTaskToComponent() {
return delegate.getTaskToComponent();
@@ -159,6 +167,7 @@
/**
* Gets a list of all component ids in this topology
+ * @return the list of component ids in this topology
*/
public Set<String> getComponentIds() {
return delegate.getComponentIds();
diff --git a/storm-compatibility/src/java/backtype/storm/task/IBolt.java b/storm-compatibility/src/java/backtype/storm/task/IBolt.java
index 9259e74..0d836de 100644
--- a/storm-compatibility/src/java/backtype/storm/task/IBolt.java
+++ b/storm-compatibility/src/java/backtype/storm/task/IBolt.java
@@ -28,18 +28,14 @@
* as output. An IBolt can do everything from filtering to joining to functions
* to aggregations. It does not have to process a tuple immediately and may
* hold onto tuples to process later.
- * <p>
* <p>A bolt's lifecycle is as follows:</p>
- * <p>
* <p>IBolt object created on client machine. The IBolt is serialized into the topology
* (using Java serialization) and submitted to the master machine of the cluster (Nimbus).
* Nimbus then launches workers which deserialize the object, call prepare on it, and then
* start processing tuples.</p>
- * <p>
* <p>If you want to parameterize an IBolt, you should set the parameter's through its
* constructor and save the parameterization state as instance variables (which will
* then get serialized and shipped to every task executing this bolt across the cluster).</p>
- * <p>
* <p>When defining bolts in Java, you should use the IRichBolt interface which adds
* necessary methods for using the Java TopologyBuilder API.</p>
*/
@@ -47,7 +43,6 @@
/**
* Called when a task for this component is initialized within a worker on the cluster.
* It provides the bolt with the environment in which the bolt executes.
- * <p>
* <p>This includes the:</p>
*
* @param stormConf The Storm configuration for this bolt. This is the configuration provided to the topology merged in with cluster configuration on this machine.
@@ -63,12 +58,10 @@
* be accessed using Tuple#getValue. The IBolt does not have to process the Tuple
* immediately. It is perfectly fine to hang onto a tuple and process it later
* (for instance, to do an aggregation or join).
- * <p>
* <p>Tuples should be emitted using the OutputCollector provided through the prepare method.
* It is required that all input tuples are acked or failed at some point using the OutputCollector.
* Otherwise, Storm will be unable to determine when tuples coming off the spouts
* have been completed.</p>
- * <p>
* <p>For the common case of acking an input tuple at the end of the execute method,
* see IBasicBolt which automates this.</p>
*
@@ -79,7 +72,6 @@
/**
* Called when an IBolt is going to be shutdown. There is no guarentee that cleanup
* will be called, because the supervisor kill -9's worker processes on the cluster.
- * <p>
* <p>The one context where cleanup is guaranteed to be called is when a topology
* is killed when running Storm in simulator.</p>
*/
diff --git a/storm-compatibility/src/java/backtype/storm/task/IOutputCollector.java b/storm-compatibility/src/java/backtype/storm/task/IOutputCollector.java
index 8930a0f..0691f66 100644
--- a/storm-compatibility/src/java/backtype/storm/task/IOutputCollector.java
+++ b/storm-compatibility/src/java/backtype/storm/task/IOutputCollector.java
@@ -26,6 +26,10 @@
public interface IOutputCollector extends IErrorReporter {
/**
* Returns the task ids that received the tuples.
+ * @param streamId the stream id
+ * @param anchors the anchors
+ * @param tuple the tuple
+ * @return the task ids that received the tuples
*/
List<Integer> emit(String streamId, Collection<Tuple> anchors, List<Object> tuple);
diff --git a/storm-compatibility/src/java/backtype/storm/task/OutputCollector.java b/storm-compatibility/src/java/backtype/storm/task/OutputCollector.java
index 1616fef..bb75dce 100644
--- a/storm-compatibility/src/java/backtype/storm/task/OutputCollector.java
+++ b/storm-compatibility/src/java/backtype/storm/task/OutputCollector.java
@@ -144,7 +144,6 @@
* as direct, or the target bolt subscribes with a non-direct grouping,
* an error will occur at runtime. The emitted values must be
* immutable.
- * <p>
* <p>The default stream must be declared as direct in the topology definition.
* See OutputDeclarer#declare for how this is done when defining topologies
* in Java.</p>
@@ -164,7 +163,6 @@
* as direct, or the target bolt subscribes with a non-direct grouping,
* an error will occur at runtime. The emitted values must be
* immutable.
- * <p>
* <p>The default stream must be declared as direct in the topology definition.
* See OutputDeclarer#declare for how this is done when defining topologies
* in Java.</p>
@@ -185,11 +183,9 @@
* as direct, or the target bolt subscribes with a non-direct grouping,
* an error will occur at runtime. The emitted values must be
* immutable.
- * <p>
* <p>The default stream must be declared as direct in the topology definition.
* See OutputDeclarer#declare for how this is done when defining topologies
* in Java.</p>
- * <p>
* <p>Note that this method does not use anchors, so downstream failures won't
* affect the failure status of any spout tuples.</p>
*
diff --git a/storm-compatibility/src/java/backtype/storm/task/SpoutTopologyContext.java b/storm-compatibility/src/java/backtype/storm/task/SpoutTopologyContext.java
index 0857615..c571590 100644
--- a/storm-compatibility/src/java/backtype/storm/task/SpoutTopologyContext.java
+++ b/storm-compatibility/src/java/backtype/storm/task/SpoutTopologyContext.java
@@ -30,6 +30,7 @@
/**
* Gets the Maximum Spout Pending value for this instance of spout.
+ * @return Maximum Spout Pending value for this instance of spout
*/
public Long getMaxSpoutPending() {
throw new RuntimeException("Heron does not support Auto MSP");
@@ -37,6 +38,7 @@
/**
* Sets the Maximum Spout Pending value for this instance of spout
+ * @param maxSpoutPending the max spout pending value
*/
public void setMaxSpoutPending(Long maxSpoutPending) {
throw new RuntimeException("Heron does not support Auto MSP");
diff --git a/storm-compatibility/src/java/backtype/storm/task/TopologyContext.java b/storm-compatibility/src/java/backtype/storm/task/TopologyContext.java
index e015710..652f704 100644
--- a/storm-compatibility/src/java/backtype/storm/task/TopologyContext.java
+++ b/storm-compatibility/src/java/backtype/storm/task/TopologyContext.java
@@ -149,6 +149,7 @@
/**
* Gets the component id for this task. The component id maps
* to a component id specified for a Spout or Bolt in the topology definition.
+ * @return the component id
*/
public String getThisComponentId() {
return delegate.getThisComponentId();
@@ -158,6 +159,7 @@
* Gets the declared output fields for the specified stream id for the component
* this task is a part of.
* @param streamId The id of the output field.
+ * @return the declared output fields for the specified stream id
*/
public Fields getThisOutputFields(String streamId) {
return new Fields(delegate.getThisOutputFields(streamId));
@@ -165,6 +167,7 @@
/**
* Gets the set of streams declared for the component of this task.
+ * @return the set of streams
*/
public Set<String> getThisStreams() {
return delegate.getThisStreams();
@@ -174,6 +177,7 @@
* Gets the index of this task id in getComponentTasks(getThisComponentId()).
* An example use case for this method is determining which task
* accesses which resource in a distributed resource to ensure an even distribution.
+ * @return the task index
*/
public int getThisTaskIndex() {
return delegate.getThisTaskIndex();
diff --git a/storm-compatibility/src/java/backtype/storm/task/WorkerTopologyContext.java b/storm-compatibility/src/java/backtype/storm/task/WorkerTopologyContext.java
index 603ccf3..f07d2c3 100644
--- a/storm-compatibility/src/java/backtype/storm/task/WorkerTopologyContext.java
+++ b/storm-compatibility/src/java/backtype/storm/task/WorkerTopologyContext.java
@@ -59,6 +59,7 @@
* (including the task for this task).
* In Heron parlance, since every instance is running as a seperate process
* this will just return the current instance's taskId
+ * @return the worker task
*/
public List<Integer> getThisWorkerTasks() {
List<Integer> retval = new LinkedList<>();
@@ -72,6 +73,7 @@
* on the same machine.
* In Heron parlance, we just return the taskId since that
* should be unique
+ * @return the worker port
*/
public Integer getThisWorkerPort() {
return delegate.getThisTaskId();
@@ -81,6 +83,7 @@
* Gets the location of the external resources for this worker on the
* local filesystem. These external resources typically include bolts implemented
* in other languages, such as Ruby or Python.
+ * @return the location of the external resources
*/
public String getCodeDir() {
throw new RuntimeException("Not supported");
@@ -90,6 +93,7 @@
* If this task spawns any subprocesses, those subprocesses must immediately
* write their PID to this directory on the local filesystem to ensure that
* Storm properly destroys that process when the worker is shutdown.
+ * @return the PID directory
*/
public String getPIDDir() {
throw new RuntimeException("Not supported");
diff --git a/website/package.json b/website/package.json
index 3e4c2d7..cd1f248 100755
--- a/website/package.json
+++ b/website/package.json
@@ -2,7 +2,7 @@
"private": true,
"dependencies": {
"del": "^2.2.2",
- "gulp": "github:gulpjs/gulp#4.0",
+ "gulp": "^4.0.0",
"gulp-autoprefixer": "^4.0.0",
"gulp-hash": "^4.1.1",
"gulp-sass": "^3.1.0",
diff --git a/website/scripts/javadocs.sh b/website/scripts/javadocs.sh
index 3b7aa45..ccbfb63 100755
--- a/website/scripts/javadocs.sh
+++ b/website/scripts/javadocs.sh
@@ -18,7 +18,7 @@
set -e
JAVADOC=javadoc
-FLAGS="-quiet"
+FLAGS="-quiet -Xdoclint:none"
HERON_ROOT_DIR=$(git rev-parse --show-toplevel)
# for display on GitHub website
@@ -50,7 +50,7 @@
rm -rf $JAVADOC_OUTPUT_DIR
mkdir -p $JAVADOC_OUTPUT_DIR
-BIN_JARS=`find $HERON_ROOT_DIR/bazel-heron/_bin/. -name "*\.jar" | tr '\n' ':'`
+BIN_JARS=`find $HERON_ROOT_DIR/bazel-incubator-heron/_bin/. -name "*\.jar" | tr '\n' ':'`
GEN_JARS=`find $HERON_ROOT_DIR/bazel-genfiles/external/. -name "*\.jar" | tr '\n' ':'`
SCRIBE_JARS=`find $HERON_ROOT_DIR/bazel-bin/. -name "libthrift_scribe_java.jar" | tr '\n' ':'`
PROTO_JARS=`find $HERON_ROOT_DIR/bazel-bin/heron/proto/. -name "*\.jar" | tr '\n' ':'`
diff --git a/website/scripts/python-doc-gen.sh b/website/scripts/python-doc-gen.sh
index 75b6a34..35f8acc 100755
--- a/website/scripts/python-doc-gen.sh
+++ b/website/scripts/python-doc-gen.sh
@@ -20,15 +20,13 @@
HERONPY_VERSION=$1
HERON_ROOT_DIR=$(git rev-parse --show-toplevel)
INPUT=heronpy
-TMP_DIR=$(mktemp --directory)
-
-VENV="$(mktemp --directory)"
+TMP_DIR=$(mktemp -d)
+VENV="$(mktemp -d)"
virtualenv "$VENV"
source "$VENV/bin/activate"
# TODO: make this a virtualenv
pip install "heronpy==${HERONPY_VERSION}" "pdoc~=0.3.2"
pip install --ignore-installed six
-
mkdir -p static/api && rm -rf static/api/python
pdoc heronpy \