Undo push
diff --git a/CHANGES.txt b/CHANGES.txt
deleted file mode 100644
index 0826b90..0000000
--- a/CHANGES.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-Horn Change Log
-
-Release 0.1.0 (unreleased changes)
-
-  NEW FEATURES
-
-    HORN-1: Website for Apache Horn (Incubating) (Elmurod Talipov via edwardyoon)
-    
-  BUG FIXES
-
-  IMPROVEMENTS
diff --git a/src/main/java/org/apache/horn/distbelief/DistBeliefModelTrainer.java b/src/main/java/org/apache/horn/distbelief/DistBeliefModelTrainer.java
deleted file mode 100644
index c7326ab..0000000
--- a/src/main/java/org/apache/horn/distbelief/DistBeliefModelTrainer.java
+++ /dev/null
@@ -1,87 +0,0 @@
-package org.apache.horn.distbelief;
-
-import java.io.IOException;
-
-import org.apache.hama.bsp.BSP;
-import org.apache.hama.bsp.BSPPeer;
-import org.apache.hama.bsp.sync.SyncException;
-
-/**
- * This DistBeliefModelTrainer performs mini-batch SGD: in each iteration it
- * fetches the latest parameters, trains on a mini-batch, and pushes the
- * updated parameters back to the parameter server.
- */
-public class DistBeliefModelTrainer extends BSP {
-
-  private boolean isConverge = false;
-  private int iterations;
-  private int maxIterations;
-  
-  @Override
-  public final void setup(BSPPeer peer) {
-    // loads a subset of the neural network model replica into memory
-  }
-  
-  @Override
-  public void bsp(BSPPeer peer) throws IOException, SyncException,
-      InterruptedException {
-
-    // Iterate until the maximum number of iterations is reached or the model converges
-    while (this.iterations++ < maxIterations) {
-      
-      // Fetch latest parameters
-      fetchParameters(peer);
-      
-      // Perform mini-batch
-      doMinibatch(peer);
-      
-      // Push parameters
-      pushParameters(peer);
-      
-      if (this.isConverge) {
-        break;
-      }
-    }
-    
-  }
-
-  /**
-   * Performs a single mini-batch training step.
-   * 
-   * @param peer the BSP peer used for communication and synchronization
-   */
-  private void doMinibatch(BSPPeer peer) {
-    double avgTrainingError = 0.0;
-    // 1. loads the next set of mini-batch instances from the assigned splits into memory
-    
-    // 2. train incrementally from a mini-batch of instances
-    /*
-    for (Instance trainingInstance : MiniBatchSet) {
-      
-      // 2.1 upward propagation (start from the input layer)
-      for (Neuron neuron : neurons) {  
-        neuron.upward(msg);
-        sync();
-      }
-        
-      // 2.2 calculate total error
-      sync();
-      
-      // 2.3 downward propagation (start from the total error)
-      for (Neuron neuron : neurons) {  
-        neuron.downward(msg);
-        sync();
-      }
-    
-    }
-    // calculate the average training error
-    */
-    
-  }
-  
-  private void fetchParameters(BSPPeer peer) {
-    // TODO fetch latest weights from the parameter server
-  }
-
-  private void pushParameters(BSPPeer peer) {
-    // TODO push updated weights
-  }
-
-}
diff --git a/src/main/java/org/apache/horn/distbelief/Neuron.java b/src/main/java/org/apache/horn/distbelief/Neuron.java
deleted file mode 100644
index fadb522..0000000
--- a/src/main/java/org/apache/horn/distbelief/Neuron.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.horn.distbelief;
-
-import org.apache.hadoop.io.Writable;
-
-public abstract class Neuron<M extends Writable> implements NeuronInterface<M> {
-  double output;
-  double weight;
-
-  public void propagate(double gradient) {
-    // TODO propagate the given value (activation or gradient) to the connected neurons
-  }
-
-  public void setOutput(double output) {
-    this.output = output;
-  }
-
-  public double getOutput() {
-    return output;
-  }
-
-  public void push(double weight) {
-    // stores the weight update so it can be retrieved via getUpdate()
-    this.weight = weight;
-  }
-
-  public double getUpdate() {
-    return weight;
-  }
-
-}
diff --git a/src/main/java/org/apache/horn/distbelief/NeuronInterface.java b/src/main/java/org/apache/horn/distbelief/NeuronInterface.java
deleted file mode 100644
index 8093b07..0000000
--- a/src/main/java/org/apache/horn/distbelief/NeuronInterface.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.horn.distbelief;
-
-import java.io.IOException;
-
-import org.apache.hadoop.io.Writable;
-
-public interface NeuronInterface<M extends Writable> {
-
-  /**
-   * This method is called when messages are propagated from the lower layer.
-   * It can be used to determine whether the neuron activates, or fires.
-   * 
-   * @param messages the messages received from the lower layer
-   * @throws IOException
-   */
-  public void upward(Iterable<M> messages) throws IOException;
-
-  /**
-   * This method is called when errors are propagated from the upper layer. It
-   * can be used to calculate the error gradient of the neuron and adjust its
-   * weights.
-   * 
-   * @param messages the error messages received from the upper layer
-   * @throws IOException
-   */
-  public void downward(Iterable<M> messages) throws IOException;
-  
-}
diff --git a/src/main/java/org/apache/horn/distbelief/PropMessage.java b/src/main/java/org/apache/horn/distbelief/PropMessage.java
deleted file mode 100644
index 029cd6a..0000000
--- a/src/main/java/org/apache/horn/distbelief/PropMessage.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.horn.distbelief;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.hadoop.io.Writable;
-
-/**
- * Wrapper for a propagated message and its associated weight.
- */
-public class PropMessage<M extends Writable, W extends Writable> implements
-    Writable {
-
-  M message;
-  W weight;
-
-  public PropMessage(M message, W weight) {
-    this.message = message;
-    this.weight = weight;
-  }
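-
-  // NOTE: Writable implementations normally also provide a no-argument
-  // constructor (with the message and weight fields instantiated) so that the
-  // framework can create an instance by reflection before calling readFields().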
-
-  /**
-   * @return the activation or error message
-   */
-  public M getMessage() {
-    return message;
-  }
-
-  /**
-   * @return the weight associated with this message
-   */
-  public W getWeight() {
-    return weight;
-  }
-
-  @Override
-  public void readFields(DataInput in) throws IOException {
-    message.readFields(in);
-    weight.readFields(in);
-  }
-
-  @Override
-  public void write(DataOutput out) throws IOException {
-    message.write(out);
-    weight.write(out);
-  }
-
-}
diff --git a/src/test/java/org/apache/horn/distbelief/TestDistBeliefModelTrainer.java b/src/test/java/org/apache/horn/distbelief/TestDistBeliefModelTrainer.java
deleted file mode 100644
index 5bbd90c..0000000
--- a/src/test/java/org/apache/horn/distbelief/TestDistBeliefModelTrainer.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package org.apache.horn.distbelief;
-
-public class TestDistBeliefModelTrainer {
-
-}
diff --git a/src/test/java/org/apache/horn/distbelief/TestNeuron.java b/src/test/java/org/apache/horn/distbelief/TestNeuron.java
deleted file mode 100644
index 9af1315..0000000
--- a/src/test/java/org/apache/horn/distbelief/TestNeuron.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.horn.distbelief;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.io.DoubleWritable;
-import org.apache.hama.commons.math.Sigmoid;
-
-public class TestNeuron extends TestCase {
-  private static double learningRate = 0.1;
-  private static double bias = -1;
-  private static double theta = 0.8;
-
-  public static class MyNeuron extends
-      Neuron<PropMessage<DoubleWritable, DoubleWritable>> {
-
-    @Override
-    public void upward(
-        Iterable<PropMessage<DoubleWritable, DoubleWritable>> messages)
-        throws IOException {
-      double sum = 0;
-      for (PropMessage<DoubleWritable, DoubleWritable> m : messages) {
-        sum += m.getMessage().get() * m.getWeight().get();
-      }
-      sum += (bias * theta);
-
-      double output = new Sigmoid().apply(sum);
-      this.setOutput(output);
-      this.propagate(output);
-    }
-
-    @Override
-    public void downward(
-        Iterable<PropMessage<DoubleWritable, DoubleWritable>> messages)
-        throws IOException {
-      for (PropMessage<DoubleWritable, DoubleWritable> m : messages) {
-        // Calculates error gradient for each neuron
-        double gradient = this.getOutput() * (1 - this.getOutput())
-            * m.getMessage().get() * m.getWeight().get();
-
-        // Propagates to lower layer
-        this.propagate(gradient);
-
-        // Weight corrections
-        double weight = learningRate * this.getOutput() * m.getMessage().get();
-        this.push(weight);
-      }
-    }
-
-  }
-
-  public void testProp() throws IOException {
-    List<PropMessage<DoubleWritable, DoubleWritable>> x = new ArrayList<PropMessage<DoubleWritable, DoubleWritable>>();
-    x.add(new PropMessage<DoubleWritable, DoubleWritable>(new DoubleWritable(
-        1.0), new DoubleWritable(0.5)));
-    x.add(new PropMessage<DoubleWritable, DoubleWritable>(new DoubleWritable(
-        1.0), new DoubleWritable(0.4)));
-
-    MyNeuron n = new MyNeuron();
-    n.upward(x);
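-    // expected output: sigmoid(1.0*0.5 + 1.0*0.4 + (-1)*0.8) = sigmoid(0.1) ≈ 0.52498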
-    assertEquals(0.5249791874789399, n.getOutput());
-
-    x.clear();
-    x.add(new PropMessage<DoubleWritable, DoubleWritable>(new DoubleWritable(
-        -0.1274), new DoubleWritable(-1.2)));
-    n.downward(x);
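-    // expected update: learningRate * output * message = 0.1 * 0.5249791874789399 * (-0.1274) ≈ -0.0066882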
-    assertEquals(-0.006688234848481696, n.getUpdate());
-  }
-  
-}