Merge branch 'master' of https://github.com/edwardyoon/incubator-horn
diff --git a/CHANGES.txt b/CHANGES.txt
new file mode 100644
index 0000000..0826b90
--- /dev/null
+++ b/CHANGES.txt
@@ -0,0 +1,11 @@
+Horn Change Log
+
+Release 0.1.0 (unreleased changes)
+
+  NEW FEATURES
+
+    HORN-1: Website for Apache Horn (Incubating) (Elmurod Talipov via edwardyoon)
+    
+  BUG FIXES
+
+  IMPROVEMENTS
diff --git a/src/main/java/org/apache/horn/distbelief/DistBeliefModelTrainer.java b/src/main/java/org/apache/horn/distbelief/DistBeliefModelTrainer.java
new file mode 100644
index 0000000..c7326ab
--- /dev/null
+++ b/src/main/java/org/apache/horn/distbelief/DistBeliefModelTrainer.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.distbelief;
+
+import java.io.IOException;
+
+import org.apache.hama.bsp.BSP;
+import org.apache.hama.bsp.BSPPeer;
+import org.apache.hama.bsp.sync.SyncException;
+
+/**
+ * Trains a neural network model replica by repeatedly fetching the latest
+ * parameters, performing a mini-batch SGD step, and pushing the updates back.
+ */
+public class DistBeliefModelTrainer extends BSP {
+
+  private boolean isConverge = false;
+  private int iterations;
+  private int maxIterations;
+
+  @Override
+  public final void setup(BSPPeer peer) {
+    // loads subset of neural network model replica into memory
+  }
+
+  @Override
+  public void bsp(BSPPeer peer) throws IOException, SyncException,
+      InterruptedException {
+
+    // Iterate until reach max iteration or convergence
+    while (this.iterations++ < maxIterations) {
+
+      // Fetch latest parameters
+      fetchParameters(peer);
+
+      // Perform mini-batch
+      doMinibatch(peer);
+
+      // Push parameters
+      pushParameters(peer);
+
+      if (this.isConverge) {
+        break;
+      }
+    }
+
+  }
+
+  /**
+   * Performs the mini-batch
+   * @param peer
+   */
+  private void doMinibatch(BSPPeer peer) {
+    double avgTrainingError = 0.0;
+    // 1. loads a next set of mini-batch instances from assigned splits into memory
+
+    // 2. train incrementally from a mini-batch of instances
+    /*
+    for (Instance trainingInstance : MiniBatchSet) {
+
+      // 2.1 upward propagation (start from the input layer)
+      for (Neuron neuron : neurons) {
+        neuron.upward(msg);
+        sync();
+      }
+
+      // 2.2 calculate total error
+      sync();
+
+      // 2.3 downward propagation (start from the total error)
+      for (Neuron neuron : neurons) {
+        neuron.downward(msg);
+        sync();
+      }
+
+    }
+    // calculate the average training error
+    */
+
+  }
+
+  private void fetchParameters(BSPPeer peer) {
+    // TODO fetch latest weights from the parameter server
+  }
+
+  private void pushParameters(BSPPeer peer) {
+    // TODO push updated weights
+  }
+
+}
diff --git a/src/main/java/org/apache/horn/distbelief/Neuron.java b/src/main/java/org/apache/horn/distbelief/Neuron.java
new file mode 100644
index 0000000..fadb522
--- /dev/null
+++ b/src/main/java/org/apache/horn/distbelief/Neuron.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.distbelief;
+
+import org.apache.hadoop.io.Writable;
+
+/**
+ * Base class for a neuron. Keeps the neuron's activation value
+ * ({@code output}) and the most recently pushed weight correction
+ * ({@code weight}).
+ */
+public abstract class Neuron<M extends Writable> implements NeuronInterface<M> {
+  double output;
+  double weight;
+
+  /**
+   * Propagates the given value onward.
+   *
+   * @param gradient activation or error gradient to propagate
+   */
+  public void propagate(double gradient) {
+    // TODO Auto-generated method stub
+  }
+
+  /** Sets the activation value of this neuron. */
+  public void setOutput(double output) {
+    this.output = output;
+  }
+
+  /** @return the activation value of this neuron */
+  public double getOutput() {
+    return output;
+  }
+
+  /**
+   * Stores a weight correction; retrievable via {@link #getUpdate()}.
+   *
+   * @param weight the weight correction
+   */
+  public void push(double weight) {
+    // TODO Auto-generated method stub
+    this.weight = weight;
+  }
+
+  /** @return the most recently pushed weight correction */
+  public double getUpdate() {
+    return weight;
+  }
+
+}
diff --git a/src/main/java/org/apache/horn/distbelief/NeuronInterface.java b/src/main/java/org/apache/horn/distbelief/NeuronInterface.java
new file mode 100644
index 0000000..8093b07
--- /dev/null
+++ b/src/main/java/org/apache/horn/distbelief/NeuronInterface.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.distbelief;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.Writable;
+
+public interface NeuronInterface<M extends Writable> {
+
+  /**
+   * This method is called when the messages are propagated from the lower
+   * layer. It can be used to determine if the neuron would activate, or fire.
+   * 
+   * @param messages
+   * @throws IOException
+   */
+  void upward(Iterable<M> messages) throws IOException;
+
+  /**
+   * This method is called when the errors are propagated from the upper layer.
+   * It can be used to calculate the error of each neuron and change the
+   * weights.
+   * 
+   * @param messages
+   * @throws IOException
+   */
+  void downward(Iterable<M> messages) throws IOException;
+
+}
diff --git a/src/main/java/org/apache/horn/distbelief/PropMessage.java b/src/main/java/org/apache/horn/distbelief/PropMessage.java
new file mode 100644
index 0000000..029cd6a
--- /dev/null
+++ b/src/main/java/org/apache/horn/distbelief/PropMessage.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.distbelief;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.io.Writable;
+
+/**
+ * Message wrapper for a propagating message
+ */
+public class PropMessage<M extends Writable, W extends Writable> implements
+    Writable {
+
+  M message;
+  W weight;
+
+  /** Default constructor, required by Hadoop's Writable deserialization. */
+  public PropMessage() {
+  }
+
+  public PropMessage(M message, W weight) {
+    this.message = message;
+    this.weight = weight;
+  }
+
+  /**
+   * @return the activation or error message
+   */
+  public M getMessage() {
+    return message;
+  }
+
+  /** @return the weight associated with this message */
+  public W getWeight() {
+    return weight;
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    // NOTE(review): message and weight must already be instantiated before
+    // this is called; after the no-arg constructor they are null, so generic
+    // instantiation (e.g. via ReflectionUtils) is still needed here.
+    message.readFields(in);
+    weight.readFields(in);
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+    message.write(out);
+    weight.write(out);
+  }
+
+}
diff --git a/src/test/java/org/apache/horn/distbelief/TestDistBeliefModelTrainer.java b/src/test/java/org/apache/horn/distbelief/TestDistBeliefModelTrainer.java
new file mode 100644
index 0000000..5bbd90c
--- /dev/null
+++ b/src/test/java/org/apache/horn/distbelief/TestDistBeliefModelTrainer.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.distbelief;
+
+public class TestDistBeliefModelTrainer {
+
+}
diff --git a/src/test/java/org/apache/horn/distbelief/TestNeuron.java b/src/test/java/org/apache/horn/distbelief/TestNeuron.java
new file mode 100644
index 0000000..9af1315
--- /dev/null
+++ b/src/test/java/org/apache/horn/distbelief/TestNeuron.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.distbelief;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hama.commons.math.Sigmoid;
+
+public class TestNeuron extends TestCase {
+  private static double learningRate = 0.1;
+  private static double bias = -1;
+  private static double theta = 0.8;
+
+  public static class MyNeuron extends
+      Neuron<PropMessage<DoubleWritable, DoubleWritable>> {
+
+    @Override
+    public void upward(
+        Iterable<PropMessage<DoubleWritable, DoubleWritable>> messages)
+        throws IOException {
+      double sum = 0;
+      for (PropMessage<DoubleWritable, DoubleWritable> m : messages) {
+        sum += m.getMessage().get() * m.getWeight().get();
+      }
+      sum += (bias * theta);
+
+      double output = new Sigmoid().apply(sum);
+      this.setOutput(output);
+      this.propagate(output);
+    }
+
+    @Override
+    public void downward(
+        Iterable<PropMessage<DoubleWritable, DoubleWritable>> messages)
+        throws IOException {
+      for (PropMessage<DoubleWritable, DoubleWritable> m : messages) {
+        // Calculates error gradient for each neuron
+        double gradient = this.getOutput() * (1 - this.getOutput())
+            * m.getMessage().get() * m.getWeight().get();
+
+        // Propagates to lower layer
+        this.propagate(gradient);
+
+        // Weight corrections
+        double weight = learningRate * this.getOutput() * m.getMessage().get();
+        this.push(weight);
+      }
+    }
+
+  }
+
+  public void testProp() throws IOException {
+    List<PropMessage<DoubleWritable, DoubleWritable>> x = new ArrayList<PropMessage<DoubleWritable, DoubleWritable>>();
+    x.add(new PropMessage<DoubleWritable, DoubleWritable>(new DoubleWritable(
+        1.0), new DoubleWritable(0.5)));
+    x.add(new PropMessage<DoubleWritable, DoubleWritable>(new DoubleWritable(
+        1.0), new DoubleWritable(0.4)));
+
+    MyNeuron n = new MyNeuron();
+    n.upward(x);
+    assertEquals(0.5249791874789399, n.getOutput(), 1e-9);
+
+    x.clear();
+    x.add(new PropMessage<DoubleWritable, DoubleWritable>(new DoubleWritable(
+        -0.1274), new DoubleWritable(-1.2)));
+    n.downward(x);
+    assertEquals(-0.006688234848481696, n.getUpdate(), 1e-9);
+  }
+
+}