HORN-3: Import initial source code from Hama ML package
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..bffd62e
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,118 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+  <parent>
+    <groupId>org.apache</groupId>
+    <artifactId>apache</artifactId>
+    <version>8</version>
+  </parent>
+
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.horn</groupId>
+  <artifactId>horn</artifactId>
+  <name>Horn</name>
+  <version>0.1.0-SNAPSHOT</version>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hama.version>0.7.0</hama.version>
+    <hadoop.version>2.7.0</hadoop.version>
+    <protobuf.version>2.5.0</protobuf.version>
+    <junit.version>4.8.1</junit.version>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hama</groupId>
+      <artifactId>hama-commons</artifactId>
+      <version>${hama.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hama</groupId>
+      <artifactId>hama-core</artifactId>
+      <version>${hama.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hama</groupId>
+      <artifactId>hama-ml</artifactId>
+      <version>${hama.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <classifier>tests</classifier>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-auth</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+      <version>${protobuf.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-client</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+  </dependencies>
+
+  <build>
+    <finalName>horn-${project.version}</finalName>
+    <plugins>
+    
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.3.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+
+    </plugins>
+  </build>
+</project>
+
diff --git a/src/main/java/org/apache/horn/bsp/AbstractLayeredNeuralNetwork.java b/src/main/java/org/apache/horn/bsp/AbstractLayeredNeuralNetwork.java
new file mode 100644
index 0000000..c29559d
--- /dev/null
+++ b/src/main/java/org/apache/horn/bsp/AbstractLayeredNeuralNetwork.java
@@ -0,0 +1,261 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.bsp;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.hama.commons.math.DoubleDoubleFunction;
+import org.apache.hama.commons.math.DoubleFunction;
+import org.apache.hama.commons.math.DoubleMatrix;
+import org.apache.hama.commons.math.DoubleVector;
+import org.apache.hama.commons.math.FunctionFactory;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
+/**
+ * AbstractLayeredNeuralNetwork defines the general operations for layered
+ * models derived from it, including Linear Regression, Logistic Regression,
+ * Multilayer Perceptron, Autoencoder, Restricted Boltzmann Machine, etc.
+ * 
+ * In general, these models consist of neurons which are aligned in layers.
+ * Between layers, for any two adjacent layers, the neurons are connected to
+ * form a bipartite weighted graph.
+ * 
+ */
+abstract class AbstractLayeredNeuralNetwork extends NeuralNetwork {
+
+  private static final double DEFAULT_REGULARIZATION_WEIGHT = 0;
+  private static final double DEFAULT_MOMENTUM_WEIGHT = 0.1;
+
+  double trainingError;
+
+  /* The weight of regularization */
+  protected double regularizationWeight;
+
+  /* The momentum weight */
+  protected double momentumWeight;
+
+  /* The cost function of the model */
+  protected DoubleDoubleFunction costFunction;
+
+  /* Record the size of each layer */
+  protected List<Integer> layerSizeList;
+
+  protected TrainingMethod trainingMethod;
+  
+  protected LearningStyle learningStyle;
+
+  public static enum TrainingMethod {
+    GRADIENT_DESCENT
+  }
+  
+  public static enum LearningStyle {
+    UNSUPERVISED,
+    SUPERVISED
+  }
+  
+  public AbstractLayeredNeuralNetwork() {
+    this.regularizationWeight = DEFAULT_REGULARIZATION_WEIGHT;
+    this.momentumWeight = DEFAULT_MOMENTUM_WEIGHT;
+    this.trainingMethod = TrainingMethod.GRADIENT_DESCENT;
+    this.learningStyle = LearningStyle.SUPERVISED;
+  }
+
+  public AbstractLayeredNeuralNetwork(String modelPath) {
+    super(modelPath);
+  }
+
+  /**
+   * Set the regularization weight. The recommended range is [0, 0.1): the
+   * more complex the model, the smaller the regularization weight should be.
+   * 
+   * @param regularizationWeight
+   */
+  public void setRegularizationWeight(double regularizationWeight) {
+    Preconditions.checkArgument(regularizationWeight >= 0
+        && regularizationWeight < 1.0,
+        "Regularization weight must be in range [0, 1.0)");
+    this.regularizationWeight = regularizationWeight;
+  }
+
+  public double getRegularizationWeight() {
+    return this.regularizationWeight;
+  }
+
+  /**
+   * Set the momentum weight for the model. The recommended range is [0, 0.5].
+   * 
+   * @param momentumWeight
+   */
+  public void setMomemtumWeight(double momentumWeight) {
+    Preconditions.checkArgument(momentumWeight >= 0 && momentumWeight <= 1.0,
+        "Momentum weight must be in range [0, 1.0]");
+    this.momentumWeight = momentumWeight;
+  }
+
+  public double getMomemtumWeight() {
+    return this.momentumWeight;
+  }
+
+  public void setTrainingMethod(TrainingMethod method) {
+    this.trainingMethod = method;
+  }
+
+  public TrainingMethod getTrainingMethod() {
+    return this.trainingMethod;
+  }
+  
+  public void setLearningStyle(LearningStyle style) {
+    this.learningStyle = style;
+  }
+  
+  public LearningStyle getLearningStyle() {
+    return this.learningStyle;
+  }
+
+  /**
+   * Set the cost function for the model.
+   * 
+   * @param costFunction
+   */
+  public void setCostFunction(DoubleDoubleFunction costFunction) {
+    this.costFunction = costFunction;
+  }
+
+  /**
+   * Add a layer of neurons with the specified size. If the added layer is not
+   * the first layer, its neurons are automatically connected to those of the
+   * previous layer.
+   * 
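+   * <p>
+   * A minimal sketch of the returned indices (illustrative; {@code f} stands
+   * for any squashing function):
+   * 
+   * <pre>
+   * int inputLayer = net.addLayer(2, false, f); // returns 0
+   * int hiddenLayer = net.addLayer(4, false, f); // returns 1
+   * int outputLayer = net.addLayer(1, true, f); // returns 2
+   * </pre>
+   * 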
+   * @param size
+   * @param isFinalLayer If false, add a bias neuron.
+   * @param squashingFunction The squashing function for this layer; the input
+   *          layer uses the identity function f(x) = x by default.
+   * @return The layer index, starting from 0.
+   */
+  public abstract int addLayer(int size, boolean isFinalLayer,
+      DoubleFunction squashingFunction);
+
+  /**
+   * Get the size of a particular layer.
+   * 
+   * @param layer
+   * @return The layer size.
+   */
+  public int getLayerSize(int layer) {
+    Preconditions.checkArgument(
+        layer >= 0 && layer < this.layerSizeList.size(),
+        String.format("Input must be in range [0, %d]\n",
+            this.layerSizeList.size() - 1));
+    return this.layerSizeList.get(layer);
+  }
+
+  /**
+   * Get the layer size list.
+   * 
+   * @return The layer size list.
+   */
+  protected List<Integer> getLayerSizeList() {
+    return this.layerSizeList;
+  }
+
+  /**
+   * Get the weights between layer layerIdx and layerIdx + 1
+   * 
+   * @param layerIdx The index of the layer
+   * @return The weights in form of {@link DoubleMatrix}
+   */
+  public abstract DoubleMatrix getWeightsByLayer(int layerIdx);
+
+  /**
+   * Get the updated weights using one training instance.
+   * 
+   * @param trainingInstance The trainingInstance is the concatenation of
+   *          feature vector and class label vector.
+   * @return The update of each weight, in form of matrix list.
+   * @throws Exception
+   */
+  public abstract DoubleMatrix[] trainByInstance(DoubleVector trainingInstance);
+
+  /**
+   * Get the output calculated by the model.
+   * 
+   * @param instance The feature instance.
+   * @return a new vector with the result of the operation.
+   */
+  public abstract DoubleVector getOutput(DoubleVector instance);
+
+  /**
+   * Calculate the training error based on the labels and outputs.
+   * 
+   * @param labels
+   * @param output
+   */
+  protected abstract void calculateTrainingError(DoubleVector labels,
+      DoubleVector output);
+
+  @Override
+  public void readFields(DataInput input) throws IOException {
+    super.readFields(input);
+    // read regularization weight
+    this.regularizationWeight = input.readDouble();
+    // read momentum weight
+    this.momentumWeight = input.readDouble();
+
+    // read cost function
+    this.costFunction = FunctionFactory
+        .createDoubleDoubleFunction(WritableUtils.readString(input));
+
+    // read layer size list
+    int numLayers = input.readInt();
+    this.layerSizeList = Lists.newArrayList();
+    for (int i = 0; i < numLayers; ++i) {
+      this.layerSizeList.add(input.readInt());
+    }
+
+    this.trainingMethod = WritableUtils.readEnum(input, TrainingMethod.class);
+    this.learningStyle = WritableUtils.readEnum(input, LearningStyle.class);
+  }
+
+  @Override
+  public void write(DataOutput output) throws IOException {
+    super.write(output);
+    // write regularization weight
+    output.writeDouble(this.regularizationWeight);
+    // write momentum weight
+    output.writeDouble(this.momentumWeight);
+
+    // write cost function
+    WritableUtils.writeString(output, costFunction.getFunctionName());
+
+    // write layer size list
+    output.writeInt(this.layerSizeList.size());
+    for (Integer aLayerSizeList : this.layerSizeList) {
+      output.writeInt(aLayerSizeList);
+    }
+
+    WritableUtils.writeEnum(output, this.trainingMethod);
+    WritableUtils.writeEnum(output, this.learningStyle);
+  }
+
+}
diff --git a/src/main/java/org/apache/horn/bsp/AutoEncoder.java b/src/main/java/org/apache/horn/bsp/AutoEncoder.java
new file mode 100644
index 0000000..6c84dc2
--- /dev/null
+++ b/src/main/java/org/apache/horn/bsp/AutoEncoder.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.bsp;
+
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hama.commons.math.DenseDoubleVector;
+import org.apache.hama.commons.math.DoubleFunction;
+import org.apache.hama.commons.math.DoubleMatrix;
+import org.apache.hama.commons.math.DoubleVector;
+import org.apache.hama.commons.math.FunctionFactory;
+import org.apache.hama.ml.util.FeatureTransformer;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * AutoEncoder is a model used for dimensionality reduction and feature
+ * learning. It is a special kind of {@link NeuralNetwork} that consists of
+ * three layers of neurons, where the first and third layers contain the same
+ * number of neurons.
+ * 
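+ * <p>
+ * A minimal usage sketch (the dimensions and rates are illustrative):
+ * 
+ * <pre>
+ * AutoEncoder ae = new AutoEncoder(100, 10);
+ * ae.setLearningRate(0.1).setMomemtumWeight(0.1);
+ * DoubleVector compressed = ae.encode(feature); // feature has 100 dimensions
+ * DoubleVector restored = ae.decode(compressed); // restored has 100 dimensions
+ * </pre>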
+ */
+public class AutoEncoder {
+
+  private final SmallLayeredNeuralNetwork model;
+
+  /**
+   * Initialize the autoencoder.
+   * 
+   * @param inputDimensions The number of dimensions for the input feature.
+   * @param compressedDimensions The number of dimensions for the compressed
+   *          information.
+   */
+  public AutoEncoder(int inputDimensions, int compressedDimensions) {
+    model = new SmallLayeredNeuralNetwork();
+    model.addLayer(inputDimensions, false,
+        FunctionFactory.createDoubleFunction("Sigmoid"));
+    model.addLayer(compressedDimensions, false,
+        FunctionFactory.createDoubleFunction("Sigmoid"));
+    model.addLayer(inputDimensions, true,
+        FunctionFactory.createDoubleFunction("Sigmoid"));
+    model.setLearningStyle(AbstractLayeredNeuralNetwork.LearningStyle.UNSUPERVISED);
+    model.setCostFunction(FunctionFactory
+        .createDoubleDoubleFunction("SquaredError"));
+  }
+
+  public AutoEncoder(String modelPath) {
+    model = new SmallLayeredNeuralNetwork(modelPath);
+  }
+
+  public AutoEncoder setLearningRate(double learningRate) {
+    model.setLearningRate(learningRate);
+    return this;
+  }
+
+  public AutoEncoder setMomemtumWeight(double momentumWeight) {
+    model.setMomemtumWeight(momentumWeight);
+    return this;
+  }
+
+  public AutoEncoder setRegularizationWeight(double regularizationWeight) {
+    model.setRegularizationWeight(regularizationWeight);
+    return this;
+  }
+  
+  public AutoEncoder setModelPath(String modelPath) {
+    model.setModelPath(modelPath);
+    return this;
+  }
+
+  /**
+   * Train the autoencoder with the given data. Note that the training data
+   * is pre-processed by the configured {@link FeatureTransformer} before it
+   * is fed to the model.
+   * 
+   * @param dataInputPath
+   * @param trainingParams
+   */
+  public void train(Path dataInputPath, Map<String, String> trainingParams) {
+    model.train(dataInputPath, trainingParams);
+  }
+
+  /**
+   * Train the model with one instance.
+   * 
+   * @param trainingInstance
+   */
+  public void trainOnline(DoubleVector trainingInstance) {
+    model.trainOnline(trainingInstance);
+  }
+
+  /**
+   * Get the matrix M used to encode the input features.
+   * 
+   * @return The matrix used to encode the input features.
+   */
+  public DoubleMatrix getEncodeWeightMatrix() {
+    return model.getWeightsByLayer(0);
+  }
+
+  /**
+   * Get the matrix M used to decode the compressed information.
+   * 
+   * @return The matrix used to decode the compressed information.
+   */
+  public DoubleMatrix getDecodeWeightMatrix() {
+    return model.getWeightsByLayer(1);
+  }
+
+  /**
+   * Transform the input features through the specified layer.
+   * 
+   * @param inputInstance
+   * @param inputLayer 0 to encode with the first layer, 1 to decode with the
+   *          second layer.
+   * @return The transformed features.
+   */
+  private DoubleVector transform(DoubleVector inputInstance, int inputLayer) {
+    DoubleVector internalInstance = new DenseDoubleVector(inputInstance.getDimension() + 1);
+    internalInstance.set(0, 1);
+    for (int i = 0; i < inputInstance.getDimension(); ++i) {
+      internalInstance.set(i + 1, inputInstance.get(i));
+    }
+    DoubleFunction squashingFunction = model
+        .getSquashingFunction(inputLayer);
+    DoubleMatrix weightMatrix = null;
+    if (inputLayer == 0) {
+      weightMatrix = this.getEncodeWeightMatrix();
+    } else {
+      weightMatrix = this.getDecodeWeightMatrix();
+    }
+    DoubleVector vec = weightMatrix.multiplyVectorUnsafe(internalInstance);
+    vec = vec.applyToElements(squashingFunction);
+    return vec;
+  }
+
+  /**
+   * Encode the input instance.
+   * @param inputInstance
+   * @return a new vector with the encoded input instance.
+   */
+  public DoubleVector encode(DoubleVector inputInstance) {
+    Preconditions
+        .checkArgument(
+            inputInstance.getDimension() == model.getLayerSize(0) - 1,
+            String.format("The dimension of input instance is %d, but the model requires dimension %d.",
+                    inputInstance.getDimension(), model.getLayerSize(0) - 1));
+    return this.transform(inputInstance, 0);
+  }
+
+  /**
+   * Decode the input instance.
+   * @param inputInstance
+   * @return a new vector with the decoded input instance.
+   */
+  public DoubleVector decode(DoubleVector inputInstance) {
+    Preconditions
+        .checkArgument(
+            inputInstance.getDimension() == model.getLayerSize(1) - 1,
+            String.format("The dimension of input instance is %d, but the model requires dimension %d.",
+                    inputInstance.getDimension(), model.getLayerSize(1) - 1));
+    return this.transform(inputInstance, 1);
+  }
+  
+  /**
+   * Get the label(s) according to the given features.
+   * @param inputInstance
+   * @return a new vector with the output of the model for the given feature
+   *         instance.
+   */
+  public DoubleVector getOutput(DoubleVector inputInstance) {
+    return model.getOutput(inputInstance);
+  }
+  
+  /**
+   * Set the feature transformer.
+   * @param featureTransformer
+   */
+  public void setFeatureTransformer(FeatureTransformer featureTransformer) {
+    this.model.setFeatureTransformer(featureTransformer);
+  }
+
+}
diff --git a/src/main/java/org/apache/horn/bsp/NeuralNetwork.java b/src/main/java/org/apache/horn/bsp/NeuralNetwork.java
new file mode 100644
index 0000000..c7f14de
--- /dev/null
+++ b/src/main/java/org/apache/horn/bsp/NeuralNetwork.java
@@ -0,0 +1,271 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.bsp;
+
+import com.google.common.base.Preconditions;
+import com.google.common.io.Closeables;
+import org.apache.commons.lang.SerializationUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.hama.ml.util.DefaultFeatureTransformer;
+import org.apache.hama.ml.util.FeatureTransformer;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Map;
+
+/**
+ * NeuralNetwork defines the general operations for all derived models.
+ * Typically, derived models such as Linear Regression, Logistic Regression,
+ * and Multilayer Perceptron consist of neurons and the weights between
+ * neurons.
+ * 
+ */
+abstract class NeuralNetwork implements Writable {
+
+  private static final double DEFAULT_LEARNING_RATE = 0.5;
+
+  protected double learningRate;
+  protected boolean learningRateDecay = false;
+
+  // the name of the model
+  protected String modelType;
+  // the path to store the model
+  protected String modelPath;
+
+  protected FeatureTransformer featureTransformer;
+
+  public NeuralNetwork() {
+    this.learningRate = DEFAULT_LEARNING_RATE;
+    this.modelType = this.getClass().getSimpleName();
+    this.featureTransformer = new DefaultFeatureTransformer();
+  }
+
+  public NeuralNetwork(String modelPath) {
+    try {
+      this.modelPath = modelPath;
+      this.readFromModel();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+  }
+
+  /**
+   * Set the degree of aggressiveness during model training. A large learning
+   * rate can increase the training speed, but it also decreases the chance
+   * that the model converges. The recommended range is (0, 0.3).
+   * 
+   * @param learningRate
+   */
+  public void setLearningRate(double learningRate) {
+    Preconditions.checkArgument(learningRate > 0,
+        "Learning rate must be larger than 0.");
+    this.learningRate = learningRate;
+  }
+
+  public double getLearningRate() {
+    return this.learningRate;
+  }
+
+  public void isLearningRateDecay(boolean decay) {
+    this.learningRateDecay = decay;
+  }
+
+  public String getModelType() {
+    return this.modelType;
+  }
+
+  /**
+   * Train the model with the path of given training data and parameters.
+   * 
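+   * <p>
+   * A minimal sketch of the expected parameters (the key names are read by
+   * the concrete trainer; the values here are illustrative):
+   * 
+   * <pre>
+   * Map&lt;String, String&gt; params = new HashMap&lt;String, String&gt;();
+   * params.put("modelPath", "/tmp/model.data");
+   * params.put("tasks", "4");
+   * model.train(new Path("/tmp/train.seq"), params);
+   * </pre>
+   * 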
+   * @param dataInputPath The path of the training data.
+   * @param trainingParams The parameters for training.
+   * @throws IOException
+   */
+  public void train(Path dataInputPath, Map<String, String> trainingParams) {
+    Preconditions.checkArgument(this.modelPath != null,
+        "Please set the model path before training.");
+    // train with BSP job
+    try {
+      trainInternal(dataInputPath, trainingParams);
+      // reload the trained model from the model path
+      this.readFromModel();
+    } catch (IOException e) {
+      e.printStackTrace();
+    } catch (InterruptedException e) {
+      e.printStackTrace();
+    } catch (ClassNotFoundException e) {
+      e.printStackTrace();
+    }
+  }
+
+  /**
+   * Train the model with the path of given training data and parameters.
+   * 
+   * @param dataInputPath
+   * @param trainingParams
+   */
+  protected abstract void trainInternal(Path dataInputPath,
+      Map<String, String> trainingParams) throws IOException,
+      InterruptedException, ClassNotFoundException;
+
+  /**
+   * Read the model data from the specified location.
+   * 
+   * @throws IOException
+   */
+  protected void readFromModel() throws IOException {
+    Preconditions.checkArgument(this.modelPath != null,
+        "Model path has not been set.");
+    Configuration conf = new Configuration();
+    FSDataInputStream is = null;
+    try {
+      URI uri = new URI(this.modelPath);
+      FileSystem fs = FileSystem.get(uri, conf);
+      is = new FSDataInputStream(fs.open(new Path(modelPath)));
+      this.readFields(is);
+    } catch (URISyntaxException e) {
+      e.printStackTrace();
+    } finally {
+      Closeables.close(is, false);
+    }
+  }
+
+  /**
+   * Write the model data to specified location.
+   * 
+   * @throws IOException
+   */
+  public void writeModelToFile() throws IOException {
+    Preconditions.checkArgument(this.modelPath != null,
+        "Model path has not been set.");
+    Configuration conf = new Configuration();
+    FSDataOutputStream stream = null;
+    try {
+      URI uri = new URI(this.modelPath);
+      FileSystem fs = FileSystem.get(uri, conf);
+      stream = fs.create(new Path(this.modelPath), true);
+      this.write(stream);
+    } catch (URISyntaxException e) {
+      e.printStackTrace();
+    } finally {
+      Closeables.close(stream, false);
+    }
+  }
+
+  /**
+   * Set the model path.
+   * 
+   * @param modelPath
+   */
+  public void setModelPath(String modelPath) {
+    this.modelPath = modelPath;
+  }
+
+  /**
+   * Get the model path.
+   * 
+   * @return the path to store the model.
+   */
+  public String getModelPath() {
+    return this.modelPath;
+  }
+
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Override
+  public void readFields(DataInput input) throws IOException {
+    // read model type
+    this.modelType = WritableUtils.readString(input);
+    // read learning rate
+    this.learningRate = input.readDouble();
+    // read model path
+    this.modelPath = WritableUtils.readString(input);
+
+    if (this.modelPath.equals("null")) {
+      this.modelPath = null;
+    }
+
+    // read feature transformer
+    int bytesLen = input.readInt();
+    byte[] featureTransformerBytes = new byte[bytesLen];
+    for (int i = 0; i < featureTransformerBytes.length; ++i) {
+      featureTransformerBytes[i] = input.readByte();
+    }
+
+    Class<? extends FeatureTransformer> featureTransformerCls = (Class<? extends FeatureTransformer>) SerializationUtils
+        .deserialize(featureTransformerBytes);
+
+    Constructor[] constructors = featureTransformerCls
+        .getDeclaredConstructors();
+    Constructor constructor = constructors[0];
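+    // NOTE: the code below assumes the first declared constructor of the
+    // transformer class takes no arguments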
+
+    try {
+      this.featureTransformer = (FeatureTransformer) constructor
+          .newInstance(new Object[] {});
+    } catch (InstantiationException e) {
+      e.printStackTrace();
+    } catch (IllegalAccessException e) {
+      e.printStackTrace();
+    } catch (IllegalArgumentException e) {
+      e.printStackTrace();
+    } catch (InvocationTargetException e) {
+      e.printStackTrace();
+    }
+  }
+
+  @Override
+  public void write(DataOutput output) throws IOException {
+    // write model type
+    WritableUtils.writeString(output, modelType);
+    // write learning rate
+    output.writeDouble(learningRate);
+    // write model path
+    if (this.modelPath != null) {
+      WritableUtils.writeString(output, modelPath);
+    } else {
+      WritableUtils.writeString(output, "null");
+    }
+
+    // serialize the class
+    Class<? extends FeatureTransformer> featureTransformerCls = this.featureTransformer
+        .getClass();
+    byte[] featureTransformerBytes = SerializationUtils
+        .serialize(featureTransformerCls);
+    output.writeInt(featureTransformerBytes.length);
+    output.write(featureTransformerBytes);
+  }
+
+  public void setFeatureTransformer(FeatureTransformer featureTransformer) {
+    this.featureTransformer = featureTransformer;
+  }
+
+  public FeatureTransformer getFeatureTransformer() {
+    return this.featureTransformer;
+  }
+
+}
diff --git a/src/main/java/org/apache/horn/bsp/NeuralNetworkTrainer.java b/src/main/java/org/apache/horn/bsp/NeuralNetworkTrainer.java
new file mode 100644
index 0000000..1c8a198
--- /dev/null
+++ b/src/main/java/org/apache/horn/bsp/NeuralNetworkTrainer.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.bsp;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hama.bsp.BSP;
+import org.apache.hama.bsp.BSPPeer;
+import org.apache.hama.bsp.sync.SyncException;
+import org.apache.hama.commons.io.VectorWritable;
+import org.apache.hama.ml.perception.MLPMessage;
+import org.apache.hama.ml.util.DefaultFeatureTransformer;
+import org.apache.hama.ml.util.FeatureTransformer;
+
+/**
+ * The trainer that is used to train the {@link SmallLayeredNeuralNetwork} with
+ * BSP. The trainer reads the training data and obtains the trained parameters
+ * of the model.
+ * 
+ */
+public abstract class NeuralNetworkTrainer extends
+    BSP<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> {
+
+  protected static final Log LOG = LogFactory
+      .getLog(NeuralNetworkTrainer.class);
+
+  protected Configuration conf;
+  protected int maxIteration;
+  protected int batchSize;
+  protected String trainingMode;
+  
+  protected FeatureTransformer featureTransformer;
+  
+  @Override
+  final public void setup(
+      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
+      throws IOException, SyncException, InterruptedException {
+    conf = peer.getConfiguration();
+    featureTransformer = new DefaultFeatureTransformer();
+    this.extraSetup(peer);
+  }
+
+  /**
+   * Handle extra setup for sub-classes.
+   * 
+   * @param peer
+   * @throws IOException
+   * @throws SyncException
+   * @throws InterruptedException
+   */
+  protected void extraSetup(
+      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
+      throws IOException, SyncException, InterruptedException {
+
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public abstract void bsp(
+      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
+      throws IOException, SyncException, InterruptedException;
+
+  @Override
+  public void cleanup(
+      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
+      throws IOException {
+    this.extraCleanup(peer);
+    // write model to modelPath
+  }
+
+  /**
+   * Handle cleanup for sub-classes. Write the trained model back.
+   * 
+   * @param peer
+   * @throws IOException
+   */
+  protected void extraCleanup(
+      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, MLPMessage> peer)
+      throws IOException {
+
+  }
+
+}
diff --git a/src/main/java/org/apache/horn/bsp/SmallLayeredNeuralNetwork.java b/src/main/java/org/apache/horn/bsp/SmallLayeredNeuralNetwork.java
new file mode 100644
index 0000000..4aee4ce
--- /dev/null
+++ b/src/main/java/org/apache/horn/bsp/SmallLayeredNeuralNetwork.java
@@ -0,0 +1,567 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.bsp;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang.math.RandomUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.hama.HamaConfiguration;
+import org.apache.hama.bsp.BSPJob;
+import org.apache.hama.commons.io.MatrixWritable;
+import org.apache.hama.commons.io.VectorWritable;
+import org.apache.hama.commons.math.DenseDoubleMatrix;
+import org.apache.hama.commons.math.DenseDoubleVector;
+import org.apache.hama.commons.math.DoubleFunction;
+import org.apache.hama.commons.math.DoubleMatrix;
+import org.apache.hama.commons.math.DoubleVector;
+import org.apache.hama.commons.math.FunctionFactory;
+import org.mortbay.log.Log;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
+/**
+ * SmallLayeredNeuralNetwork defines the general operations for layered models
+ * derived from it, including Linear Regression, Logistic Regression,
+ * Multilayer Perceptron, Autoencoder, Restricted Boltzmann Machine, etc. For
+ * SmallLayeredNeuralNetwork, training can be conducted in parallel, but the
+ * parameters of the model are assumed to be stored on a single machine.
+ * 
+ * In general, these models consist of neurons which are aligned in layers.
+ * Between layers, for any two adjacent layers, the neurons are connected to
+ * form a bipartite weighted graph.
+ * 
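+ * <p>
+ * A minimal sketch of building and training such a network online (layer
+ * sizes are illustrative; the function names are those used elsewhere in this
+ * package):
+ * 
+ * <pre>
+ * SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+ * ann.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+ * ann.addLayer(4, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+ * ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+ * ann.setCostFunction(FunctionFactory.createDoubleDoubleFunction("SquaredError"));
+ * // a training instance concatenates the features and the label(s)
+ * ann.trainOnline(new DenseDoubleVector(new double[] { 0, 1, 1 }));
+ * </pre>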
+ */
+public class SmallLayeredNeuralNetwork extends AbstractLayeredNeuralNetwork {
+
+  /* Weights between neurons at adjacent layers */
+  protected List<DoubleMatrix> weightMatrixList;
+
+  /* Previous weight updates between neurons at adjacent layers */
+  protected List<DoubleMatrix> prevWeightUpdatesList;
+
+  /* Different layers can have different squashing functions */
+  protected List<DoubleFunction> squashingFunctionList;
+
+  protected int finalLayerIdx;
+
+  public SmallLayeredNeuralNetwork() {
+    this.layerSizeList = Lists.newArrayList();
+    this.weightMatrixList = Lists.newArrayList();
+    this.prevWeightUpdatesList = Lists.newArrayList();
+    this.squashingFunctionList = Lists.newArrayList();
+  }
+
+  public SmallLayeredNeuralNetwork(String modelPath) {
+    super(modelPath);
+  }
+
+  @Override
+  /**
+   * {@inheritDoc}
+   */
+  public int addLayer(int size, boolean isFinalLayer,
+      DoubleFunction squashingFunction) {
+    Preconditions.checkArgument(size > 0,
+        "Size of layer must be larger than 0.");
+    if (!isFinalLayer) {
+      size += 1;
+    }
+
+    this.layerSizeList.add(size);
+    int layerIdx = this.layerSizeList.size() - 1;
+    if (isFinalLayer) {
+      this.finalLayerIdx = layerIdx;
+    }
+
+    // add weights between the current layer and the previous layer; the
+    // input layer has no squashing function
+    if (layerIdx > 0) {
+      int sizePrevLayer = this.layerSizeList.get(layerIdx - 1);
+      // the row count equals the size of the current layer and the column
+      // count equals the size of the previous layer
+      int row = isFinalLayer ? size : size - 1;
+      int col = sizePrevLayer;
+      DoubleMatrix weightMatrix = new DenseDoubleMatrix(row, col);
+      // initialize weights
+      weightMatrix.applyToElements(new DoubleFunction() {
+        @Override
+        public double apply(double value) {
+          return RandomUtils.nextDouble() - 0.5;
+        }
+
+        @Override
+        public double applyDerivative(double value) {
+          throw new UnsupportedOperationException("");
+        }
+      });
+      this.weightMatrixList.add(weightMatrix);
+      this.prevWeightUpdatesList.add(new DenseDoubleMatrix(row, col));
+      this.squashingFunctionList.add(squashingFunction);
+    }
+    return layerIdx;
+  }
+
+  /**
+   * Update the weight matrices with given matrices.
+   * 
+   * @param matrices
+   */
+  public void updateWeightMatrices(DoubleMatrix[] matrices) {
+    for (int i = 0; i < matrices.length; ++i) {
+      DoubleMatrix matrix = this.weightMatrixList.get(i);
+      this.weightMatrixList.set(i, matrix.add(matrices[i]));
+    }
+  }
+
+  /**
+   * Set the previous weight update matrices.
+   * @param prevUpdates
+   */
+  void setPrevWeightMatrices(DoubleMatrix[] prevUpdates) {
+    this.prevWeightUpdatesList.clear();
+    Collections.addAll(this.prevWeightUpdatesList, prevUpdates);
+  }
+
+  /**
+   * Add a batch of matrices onto the given destination matrices.
+   * 
+   * @param destMatrices
+   * @param sourceMatrices
+   */
+  static void matricesAdd(DoubleMatrix[] destMatrices,
+      DoubleMatrix[] sourceMatrices) {
+    for (int i = 0; i < destMatrices.length; ++i) {
+      destMatrices[i] = destMatrices[i].add(sourceMatrices[i]);
+    }
+  }
+
+  /**
+   * Get all the weight matrices.
+   * 
+   * @return The matrices in form of matrix array.
+   */
+  DoubleMatrix[] getWeightMatrices() {
+    DoubleMatrix[] matrices = new DoubleMatrix[this.weightMatrixList.size()];
+    this.weightMatrixList.toArray(matrices);
+    return matrices;
+  }
+
+  /**
+   * Set the weight matrices.
+   * 
+   * @param matrices
+   */
+  public void setWeightMatrices(DoubleMatrix[] matrices) {
+    this.weightMatrixList = new ArrayList<DoubleMatrix>();
+    Collections.addAll(this.weightMatrixList, matrices);
+  }
+
+  /**
+   * Get the previous matrix updates in the form of an array.
+   * 
+   * @return The matrices in form of matrix array.
+   */
+  public DoubleMatrix[] getPrevMatricesUpdates() {
+    DoubleMatrix[] prevMatricesUpdates = new DoubleMatrix[this.prevWeightUpdatesList
+        .size()];
+    for (int i = 0; i < this.prevWeightUpdatesList.size(); ++i) {
+      prevMatricesUpdates[i] = this.prevWeightUpdatesList.get(i);
+    }
+    return prevMatricesUpdates;
+  }
+
+  public void setWeightMatrix(int index, DoubleMatrix matrix) {
+    Preconditions.checkArgument(
+        0 <= index && index < this.weightMatrixList.size(), String.format(
+            "index [%d] should be in range [%d, %d).", index, 0,
+            this.weightMatrixList.size()));
+    this.weightMatrixList.set(index, matrix);
+  }
+
+  @Override
+  public void readFields(DataInput input) throws IOException {
+    super.readFields(input);
+
+    // read squash functions
+    int squashingFunctionSize = input.readInt();
+    this.squashingFunctionList = Lists.newArrayList();
+    for (int i = 0; i < squashingFunctionSize; ++i) {
+      this.squashingFunctionList.add(FunctionFactory
+          .createDoubleFunction(WritableUtils.readString(input)));
+    }
+
+    // read weights and construct matrices of previous updates
+    int numOfMatrices = input.readInt();
+    this.weightMatrixList = Lists.newArrayList();
+    this.prevWeightUpdatesList = Lists.newArrayList();
+    for (int i = 0; i < numOfMatrices; ++i) {
+      DoubleMatrix matrix = MatrixWritable.read(input);
+      this.weightMatrixList.add(matrix);
+      this.prevWeightUpdatesList.add(new DenseDoubleMatrix(
+          matrix.getRowCount(), matrix.getColumnCount()));
+    }
+
+  }
+
+  @Override
+  public void write(DataOutput output) throws IOException {
+    super.write(output);
+
+    // write squashing functions
+    output.writeInt(this.squashingFunctionList.size());
+    for (DoubleFunction aSquashingFunctionList : this.squashingFunctionList) {
+      WritableUtils.writeString(output, aSquashingFunctionList
+              .getFunctionName());
+    }
+
+    // write weight matrices
+    output.writeInt(this.weightMatrixList.size());
+    for (DoubleMatrix aWeightMatrixList : this.weightMatrixList) {
+      MatrixWritable.write(aWeightMatrixList, output);
+    }
+
+    // DO NOT WRITE WEIGHT UPDATE
+  }
+
+  @Override
+  public DoubleMatrix getWeightsByLayer(int layerIdx) {
+    return this.weightMatrixList.get(layerIdx);
+  }
+
+  /**
+   * Get the output of the model according to given feature instance.
+   */
+  @Override
+  public DoubleVector getOutput(DoubleVector instance) {
+    Preconditions.checkArgument(this.layerSizeList.get(0) - 1 == instance
+        .getDimension(), String.format(
+        "The dimension of input instance should be %d.",
+        this.layerSizeList.get(0) - 1));
+    // transform the features to another space
+    DoubleVector transformedInstance = this.featureTransformer
+        .transform(instance);
+    // add bias feature
+    DoubleVector instanceWithBias = new DenseDoubleVector(
+        transformedInstance.getDimension() + 1);
+    instanceWithBias.set(0, 0.99999); // set bias to be a little bit less than
+                                      // 1.0
+    for (int i = 1; i < instanceWithBias.getDimension(); ++i) {
+      instanceWithBias.set(i, transformedInstance.get(i - 1));
+    }
+
+    List<DoubleVector> outputCache = getOutputInternal(instanceWithBias);
+    // return the output of the last layer
+    DoubleVector result = outputCache.get(outputCache.size() - 1);
+    // remove bias
+    return result.sliceUnsafe(1, result.getDimension() - 1);
+  }
+
+  /**
+   * Calculate the output internally; the intermediate output of each layer is
+   * cached.
+   * 
+   * @param instanceWithBias The instance contains the features.
+   * @return Cached output of each layer.
+   */
+  public List<DoubleVector> getOutputInternal(DoubleVector instanceWithBias) {
+    List<DoubleVector> outputCache = new ArrayList<DoubleVector>();
+    // fill with instance
+    DoubleVector intermediateOutput = instanceWithBias;
+    outputCache.add(intermediateOutput);
+
+    for (int i = 0; i < this.layerSizeList.size() - 1; ++i) {
+      intermediateOutput = forward(i, intermediateOutput);
+      outputCache.add(intermediateOutput);
+    }
+    return outputCache;
+  }
+
+  /**
+   * Forward the calculation for one layer.
+   * 
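+   * <p>
+   * In effect this computes squash(W * intermediateOutput), where W is the
+   * weight matrix between fromLayer and fromLayer + 1, and then prepends a
+   * bias entry of 1 to the result.
+   * 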
+   * @param fromLayer The index of the previous layer.
+   * @param intermediateOutput The intermediateOutput of previous layer.
+   * @return a new vector with the result of the operation.
+   */
+  protected DoubleVector forward(int fromLayer, DoubleVector intermediateOutput) {
+    DoubleMatrix weightMatrix = this.weightMatrixList.get(fromLayer);
+
+    DoubleVector vec = weightMatrix.multiplyVectorUnsafe(intermediateOutput);
+    vec = vec.applyToElements(this.squashingFunctionList.get(fromLayer));
+
+    // add bias
+    DoubleVector vecWithBias = new DenseDoubleVector(vec.getDimension() + 1);
+    vecWithBias.set(0, 1);
+    for (int i = 0; i < vec.getDimension(); ++i) {
+      vecWithBias.set(i + 1, vec.get(i));
+    }
+    return vecWithBias;
+  }
+
+  /**
+   * Train the model online.
+   * 
+   * @param trainingInstance
+   */
+  public void trainOnline(DoubleVector trainingInstance) {
+    DoubleMatrix[] updateMatrices = this.trainByInstance(trainingInstance);
+    this.updateWeightMatrices(updateMatrices);
+  }
+
+  @Override
+  public DoubleMatrix[] trainByInstance(DoubleVector trainingInstance) {
+    DoubleVector transformedVector = this.featureTransformer
+        .transform(trainingInstance.sliceUnsafe(this.layerSizeList.get(0) - 1));
+
+    int inputDimension = this.layerSizeList.get(0) - 1;
+    int outputDimension;
+    DoubleVector inputInstance = null;
+    DoubleVector labels = null;
+    if (this.learningStyle == LearningStyle.SUPERVISED) {
+      outputDimension = this.layerSizeList.get(this.layerSizeList.size() - 1);
+      // validate training instance
+      Preconditions.checkArgument(
+          inputDimension + outputDimension == trainingInstance.getDimension(),
+          String
+              .format(
+                  "The dimension of training instance is %d, but requires %d.",
+                  trainingInstance.getDimension(), inputDimension
+                      + outputDimension));
+
+      inputInstance = new DenseDoubleVector(this.layerSizeList.get(0));
+      inputInstance.set(0, 1); // add bias
+      // get the features from the transformed vector
+      for (int i = 0; i < inputDimension; ++i) {
+        inputInstance.set(i + 1, transformedVector.get(i));
+      }
+      // get the labels from the original training instance
+      labels = trainingInstance.sliceUnsafe(inputInstance.getDimension() - 1,
+          trainingInstance.getDimension() - 1);
+    } else if (this.learningStyle == LearningStyle.UNSUPERVISED) {
+      // labels are identical to input features
+      outputDimension = inputDimension;
+      // validate training instance
+      Preconditions.checkArgument(inputDimension == trainingInstance
+          .getDimension(), String.format(
+          "The dimension of training instance is %d, but requires %d.",
+          trainingInstance.getDimension(), inputDimension));
+
+      inputInstance = new DenseDoubleVector(this.layerSizeList.get(0));
+      inputInstance.set(0, 1); // add bias
+      // get the features from the transformed vector
+      for (int i = 0; i < inputDimension; ++i) {
+        inputInstance.set(i + 1, transformedVector.get(i));
+      }
+      // get the labels by copying the transformed vector
+      labels = transformedVector.deepCopy();
+    }
+
+    List<DoubleVector> internalResults = this.getOutputInternal(inputInstance);
+    DoubleVector output = internalResults.get(internalResults.size() - 1);
+
+    // get the training error
+    calculateTrainingError(labels,
+        output.deepCopy().sliceUnsafe(1, output.getDimension() - 1));
+
+    if (this.trainingMethod.equals(TrainingMethod.GRADIENT_DESCENT)) {
+      return this.trainByInstanceGradientDescent(labels, internalResults);
+    } else {
+      throw new IllegalArgumentException("Training method is not supported.");
+    }
+  }
+
+  /**
+   * Train by gradient descent. Get the updated weights using one training
+   * instance.
+   * 
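+   * <p>
+   * For each output neuron i, the delta is computed as
+   * (costFunction'(label_i, output_i) + regularizationWeight * sum of the
+   * i-th row of the last weight matrix) * squashingFunction'(output_i), and
+   * the deltas are then back-propagated layer by layer, as in the code below.
+   * 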
+   * @param labels The expected output.
+   * @param internalResults The cached output of each layer from the forward
+   *          pass.
+   * @return The weight update matrices.
+   */
+  private DoubleMatrix[] trainByInstanceGradientDescent(DoubleVector labels,
+      List<DoubleVector> internalResults) {
+
+    DoubleVector output = internalResults.get(internalResults.size() - 1);
+    // initialize weight update matrices
+    DenseDoubleMatrix[] weightUpdateMatrices = new DenseDoubleMatrix[this.weightMatrixList
+        .size()];
+    for (int m = 0; m < weightUpdateMatrices.length; ++m) {
+      weightUpdateMatrices[m] = new DenseDoubleMatrix(this.weightMatrixList
+          .get(m).getRowCount(), this.weightMatrixList.get(m).getColumnCount());
+    }
+    DoubleVector deltaVec = new DenseDoubleVector(
+        this.layerSizeList.get(this.layerSizeList.size() - 1));
+
+    DoubleFunction squashingFunction = this.squashingFunctionList
+        .get(this.squashingFunctionList.size() - 1);
+
+    DoubleMatrix lastWeightMatrix = this.weightMatrixList
+        .get(this.weightMatrixList.size() - 1);
+    for (int i = 0; i < deltaVec.getDimension(); ++i) {
+      double costFuncDerivative = this.costFunction.applyDerivative(
+          labels.get(i), output.get(i + 1));
+      // add regularization
+      costFuncDerivative += this.regularizationWeight
+          * lastWeightMatrix.getRowVector(i).sum();
+      deltaVec.set(
+          i,
+          costFuncDerivative
+              * squashingFunction.applyDerivative(output.get(i + 1)));
+    }
+
+    // start from previous layer of output layer
+    for (int layer = this.layerSizeList.size() - 2; layer >= 0; --layer) {
+      output = internalResults.get(layer);
+      deltaVec = backpropagate(layer, deltaVec, internalResults,
+          weightUpdateMatrices[layer]);
+    }
+
+    this.setPrevWeightMatrices(weightUpdateMatrices);
+
+    return weightUpdateMatrices;
+  }
+
+  /**
+   * Back-propagate the errors from the next layer to the current layer. The
+   * weight update information is stored in weightUpdateMatrix, and the delta
+   * of the current layer is returned.
+   * 
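+   * <p>
+   * Sketch of the computation: delta = (W^T * nextLayerDelta), multiplied
+   * element-wise by squashingFunction'(curLayerOutput); each weight update is
+   * -learningRate * nextLayerDelta[i] * curLayerOutput[j]
+   * + momentumWeight * prevWeightUpdate[i][j].
+   * 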
+   * @param curLayerIdx Index of the current layer.
+   * @param nextLayerDelta Delta of the next layer.
+   * @param outputCache Cached output of each layer from the forward pass.
+   * @param weightUpdateMatrix The matrix in which to store the weight updates.
+   * @return The delta of the current layer.
+   */
+  private DoubleVector backpropagate(int curLayerIdx,
+      DoubleVector nextLayerDelta, List<DoubleVector> outputCache,
+      DenseDoubleMatrix weightUpdateMatrix) {
+
+    // get layer related information
+    DoubleFunction squashingFunction = this.squashingFunctionList
+        .get(curLayerIdx);
+    DoubleVector curLayerOutput = outputCache.get(curLayerIdx);
+    DoubleMatrix weightMatrix = this.weightMatrixList.get(curLayerIdx);
+    DoubleMatrix prevWeightMatrix = this.prevWeightUpdatesList.get(curLayerIdx);
+
+    // if the next layer is not the output layer, remove the delta of the
+    // bias neuron
+    if (curLayerIdx != this.layerSizeList.size() - 2) {
+      nextLayerDelta = nextLayerDelta.slice(1,
+          nextLayerDelta.getDimension() - 1);
+    }
+
+    DoubleVector delta = weightMatrix.transpose()
+        .multiplyVector(nextLayerDelta);
+    for (int i = 0; i < delta.getDimension(); ++i) {
+      delta.set(
+          i,
+          delta.get(i)
+              * squashingFunction.applyDerivative(curLayerOutput.get(i)));
+    }
+
+    // update weights
+    for (int i = 0; i < weightUpdateMatrix.getRowCount(); ++i) {
+      for (int j = 0; j < weightUpdateMatrix.getColumnCount(); ++j) {
+        weightUpdateMatrix.set(i, j,
+            -learningRate * nextLayerDelta.get(i) * curLayerOutput.get(j)
+                + this.momentumWeight * prevWeightMatrix.get(i, j));
+      }
+    }
+
+    return delta;
+  }
+
+  @Override
+  protected void trainInternal(Path dataInputPath,
+      Map<String, String> trainingParams) throws IOException,
+      InterruptedException, ClassNotFoundException {
+    // add all training parameters to configuration
+    Configuration conf = new Configuration();
+    for (Map.Entry<String, String> entry : trainingParams.entrySet()) {
+      conf.set(entry.getKey(), entry.getValue());
+    }
+
+    // if training parameters contains the model path, update the model path
+    String modelPath = trainingParams.get("modelPath");
+    if (modelPath != null) {
+      this.modelPath = modelPath;
+    }
+    // modelPath must be set before training
+    if (this.modelPath == null) {
+      throw new IllegalArgumentException(
+          "Please specify the modelPath for model, "
+              + "either through setModelPath() or add 'modelPath' to the training parameters.");
+    }
+
+    conf.set("modelPath", this.modelPath);
+    this.writeModelToFile();
+
+    HamaConfiguration hamaConf = new HamaConfiguration(conf);
+
+    // create job
+    BSPJob job = new BSPJob(hamaConf, SmallLayeredNeuralNetworkTrainer.class);
+    job.setJobName("Small scale Neural Network training");
+    job.setJarByClass(SmallLayeredNeuralNetworkTrainer.class);
+    job.setBspClass(SmallLayeredNeuralNetworkTrainer.class);
+    job.setInputPath(dataInputPath);
+    job.setInputFormat(org.apache.hama.bsp.SequenceFileInputFormat.class);
+    job.setInputKeyClass(LongWritable.class);
+    job.setInputValueClass(VectorWritable.class);
+    job.setOutputKeyClass(NullWritable.class);
+    job.setOutputValueClass(NullWritable.class);
+    job.setOutputFormat(org.apache.hama.bsp.NullOutputFormat.class);
+
+    int numTasks = conf.getInt("tasks", 1);
+    Log.info(String.format("Number of tasks: %d\n", numTasks));
+    job.setNumBspTask(numTasks);
+    job.waitForCompletion(true);
+
+    // reload learned model
+    Log.info(String.format("Reload model from %s.", this.modelPath));
+    this.readFromModel();
+
+  }
+
+  @Override
+  protected void calculateTrainingError(DoubleVector labels, DoubleVector output) {
+    DoubleVector errors = labels.deepCopy().applyToElements(output,
+        this.costFunction);
+    this.trainingError = errors.sum();
+  }
+
+  /**
+   * Get the squashing function of a specified layer.
+   * 
+   * @param idx
+   * @return The squashing function at the given index.
+   */
+  public DoubleFunction getSquashingFunction(int idx) {
+    return this.squashingFunctionList.get(idx);
+  }
+
+}
diff --git a/src/main/java/org/apache/horn/bsp/SmallLayeredNeuralNetworkMessage.java b/src/main/java/org/apache/horn/bsp/SmallLayeredNeuralNetworkMessage.java
new file mode 100644
index 0000000..2f8c287
--- /dev/null
+++ b/src/main/java/org/apache/horn/bsp/SmallLayeredNeuralNetworkMessage.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.bsp;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.io.Writable;
+import org.apache.hama.commons.io.MatrixWritable;
+import org.apache.hama.commons.math.DenseDoubleMatrix;
+import org.apache.hama.commons.math.DoubleMatrix;
+
+/**
+ * SmallLayeredNeuralNetworkMessage transmits the messages between peers during
+ * the training of the neural network.
+ * 
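+ * <p>
+ * The wire format, as implemented below, is: the training error (double),
+ * the convergence flag (boolean), the number of matrices (int), a flag
+ * indicating whether the previous matrices are included (boolean), the
+ * current matrices, and optionally the previous matrices.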
+ */
+public class SmallLayeredNeuralNetworkMessage implements Writable {
+
+  protected double trainingError;
+  protected DoubleMatrix[] curMatrices;
+  protected DoubleMatrix[] prevMatrices;
+  protected boolean converge;
+
+  public SmallLayeredNeuralNetworkMessage() {
+  }
+  
+  public SmallLayeredNeuralNetworkMessage(double trainingError,
+      boolean converge, DoubleMatrix[] weightMatrices,
+      DoubleMatrix[] prevMatrices) {
+    this.trainingError = trainingError;
+    this.converge = converge;
+    this.curMatrices = weightMatrices;
+    this.prevMatrices = prevMatrices;
+  }
+
+  @Override
+  public void readFields(DataInput input) throws IOException {
+    trainingError = input.readDouble();
+    converge = input.readBoolean();
+    int numMatrices = input.readInt();
+    boolean hasPrevMatrices = input.readBoolean();
+    curMatrices = new DenseDoubleMatrix[numMatrices];
+    // read the current matrix updates
+    for (int i = 0; i < curMatrices.length; ++i) {
+      curMatrices[i] = (DenseDoubleMatrix) MatrixWritable.read(input);
+    }
+
+    if (hasPrevMatrices) {
+      prevMatrices = new DenseDoubleMatrix[numMatrices];
+      // read previous matrices updates
+      for (int i = 0; i < prevMatrices.length; ++i) {
+        prevMatrices[i] = (DenseDoubleMatrix) MatrixWritable.read(input);
+      }
+    }
+  }
+
+  @Override
+  public void write(DataOutput output) throws IOException {
+    output.writeDouble(trainingError);
+    output.writeBoolean(converge);
+    output.writeInt(curMatrices.length);
+    if (prevMatrices == null) {
+      output.writeBoolean(false);
+    } else {
+      output.writeBoolean(true);
+    }
+    for (DoubleMatrix matrix : curMatrices) {
+      MatrixWritable.write(matrix, output);
+    }
+    if (prevMatrices != null) {
+      for (DoubleMatrix matrix : prevMatrices) {
+        MatrixWritable.write(matrix, output);
+      }
+    }
+  }
+
+  public double getTrainingError() {
+    return trainingError;
+  }
+
+  public void setTrainingError(double trainingError) {
+    this.trainingError = trainingError;
+  }
+
+  public boolean isConverge() {
+    return converge;
+  }
+
+  public void setConverge(boolean converge) {
+    this.converge = converge;
+  }
+
+  public DoubleMatrix[] getCurMatrices() {
+    return curMatrices;
+  }
+
+  public void setMatrices(DoubleMatrix[] curMatrices) {
+    this.curMatrices = curMatrices;
+  }
+
+  public DoubleMatrix[] getPrevMatrices() {
+    return prevMatrices;
+  }
+
+  public void setPrevMatrices(DoubleMatrix[] prevMatrices) {
+    this.prevMatrices = prevMatrices;
+  }
+
+}
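
Since the message implements Hadoop's Writable contract, it can be round-tripped through plain java.io streams. A minimal sketch, assuming matrices is a DoubleMatrix[] built as in the tests below:

    SmallLayeredNeuralNetworkMessage msg = new SmallLayeredNeuralNetworkMessage(
        0.1, false, matrices, null);
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    msg.write(new DataOutputStream(bos));            // serialize

    SmallLayeredNeuralNetworkMessage copy = new SmallLayeredNeuralNetworkMessage();
    copy.readFields(new DataInputStream(
        new ByteArrayInputStream(bos.toByteArray()))); // deserialize
    // copy now carries the same training error, convergence flag and matrices
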
diff --git a/src/main/java/org/apache/horn/bsp/SmallLayeredNeuralNetworkTrainer.java b/src/main/java/org/apache/horn/bsp/SmallLayeredNeuralNetworkTrainer.java
new file mode 100644
index 0000000..132ec8c
--- /dev/null
+++ b/src/main/java/org/apache/horn/bsp/SmallLayeredNeuralNetworkTrainer.java
@@ -0,0 +1,244 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.bsp;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hama.bsp.BSP;
+import org.apache.hama.bsp.BSPPeer;
+import org.apache.hama.bsp.sync.SyncException;
+import org.apache.hama.commons.io.VectorWritable;
+import org.apache.hama.commons.math.DenseDoubleMatrix;
+import org.apache.hama.commons.math.DoubleMatrix;
+import org.apache.hama.commons.math.DoubleVector;
+import org.mortbay.log.Log;
+
+/**
+ * The trainer that trains the {@link SmallLayeredNeuralNetwork} based on the
+ * BSP framework.
+ * 
+ */
+public final class SmallLayeredNeuralNetworkTrainer
+    extends
+    BSP<LongWritable, VectorWritable, NullWritable, NullWritable, SmallLayeredNeuralNetworkMessage> {
+
+  private SmallLayeredNeuralNetwork inMemoryModel;
+  private Configuration conf;
+  /* batch size used in each training superstep */
+  private int batchSize;
+
+  /* training error tracked across convergence check intervals */
+  private double prevAvgTrainingError;
+  private double curAvgTrainingError;
+  private long convergenceCheckInterval;
+  private long iterations;
+  private long maxIterations;
+  private boolean isConverge;
+
+  private String modelPath;
+
+  /**
+   * If the model path is specified, load the existing model from the storage
+   * location.
+   */
+  @Override
+  public void setup(
+      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, SmallLayeredNeuralNetworkMessage> peer) {
+    if (peer.getPeerIndex() == 0) {
+      Log.info("Begin to train");
+    }
+    this.isConverge = false;
+    this.conf = peer.getConfiguration();
+    this.iterations = 0;
+    this.modelPath = conf.get("modelPath");
+    this.maxIterations = conf.getLong("training.max.iterations", 100000);
+    this.convergenceCheckInterval = conf.getLong("convergence.check.interval",
+        2000);
+    this.modelPath = conf.get("modelPath");
+    this.inMemoryModel = new SmallLayeredNeuralNetwork(modelPath);
+    this.prevAvgTrainingError = Integer.MAX_VALUE;
+    this.batchSize = conf.getInt("training.batch.size", 50);
+  }
+
+  /**
+   * Write the trained model back to the storage location.
+   */
+  @Override
+  public void cleanup(
+      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, SmallLayeredNeuralNetworkMessage> peer) {
+    // write model to modelPath
+    if (peer.getPeerIndex() == 0) {
+      try {
+        Log.info(String.format("End of training, number of iterations: %d.\n",
+            this.iterations));
+        Log.info(String.format("Write model back to %s\n",
+            inMemoryModel.getModelPath()));
+        this.inMemoryModel.writeModelToFile();
+      } catch (IOException e) {
+        e.printStackTrace();
+      }
+    }
+  }
+
+  @Override
+  public void bsp(
+      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, SmallLayeredNeuralNetworkMessage> peer)
+      throws IOException, SyncException, InterruptedException {
+    while (this.iterations++ < maxIterations) {
+      // each groom calculates the matrix updates from its local data
+      calculateUpdates(peer);
+      peer.sync();
+
+      // the master merges the updates
+      if (peer.getPeerIndex() == 0) {
+        mergeUpdates(peer);
+      }
+      peer.sync();
+      if (this.isConverge) {
+        break;
+      }
+    }
+  }
+
+  /**
+   * Calculate the matrix updates from the local partition of the data.
+   * 
+   * @param peer the BSP peer.
+   * @throws IOException
+   */
+  private void calculateUpdates(
+      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, SmallLayeredNeuralNetworkMessage> peer)
+      throws IOException {
+    // receive update information from master
+    if (peer.getNumCurrentMessages() != 0) {
+      SmallLayeredNeuralNetworkMessage inMessage = peer.getCurrentMessage();
+      DoubleMatrix[] newWeights = inMessage.getCurMatrices();
+      DoubleMatrix[] preWeightUpdates = inMessage.getPrevMatrices();
+      this.inMemoryModel.setWeightMatrices(newWeights);
+      this.inMemoryModel.setPrevWeightMatrices(preWeightUpdates);
+      this.isConverge = inMessage.isConverge();
+      // check converge
+      if (isConverge) {
+        return;
+      }
+    }
+
+    DoubleMatrix[] weightUpdates = new DoubleMatrix[this.inMemoryModel.weightMatrixList
+        .size()];
+    for (int i = 0; i < weightUpdates.length; ++i) {
+      int row = this.inMemoryModel.weightMatrixList.get(i).getRowCount();
+      int col = this.inMemoryModel.weightMatrixList.get(i).getColumnCount();
+      weightUpdates[i] = new DenseDoubleMatrix(row, col);
+    }
+
+    // continue to train
+    double avgTrainingError = 0.0;
+    LongWritable key = new LongWritable();
+    VectorWritable value = new VectorWritable();
+    for (int recordsRead = 0; recordsRead < batchSize; ++recordsRead) {
+      if (!peer.readNext(key, value)) {
+        peer.reopenInput();
+        peer.readNext(key, value);
+      }
+      DoubleVector trainingInstance = value.getVector();
+      SmallLayeredNeuralNetwork.matricesAdd(weightUpdates,
+          this.inMemoryModel.trainByInstance(trainingInstance));
+      avgTrainingError += this.inMemoryModel.trainingError;
+    }
+    avgTrainingError /= batchSize;
+
+    // calculate the average of updates
+    for (int i = 0; i < weightUpdates.length; ++i) {
+      weightUpdates[i] = weightUpdates[i].divide(batchSize);
+    }
+
+    DoubleMatrix[] prevWeightUpdates = this.inMemoryModel
+        .getPrevMatricesUpdates();
+    SmallLayeredNeuralNetworkMessage outMessage = new SmallLayeredNeuralNetworkMessage(
+        avgTrainingError, false, weightUpdates, prevWeightUpdates);
+    peer.send(peer.getPeerName(0), outMessage);
+  }
+
+  /**
+   * Merge the updates received from the grooms.
+   * 
+   * @param peer the BSP peer.
+   * @throws IOException
+   */
+  private void mergeUpdates(
+      BSPPeer<LongWritable, VectorWritable, NullWritable, NullWritable, SmallLayeredNeuralNetworkMessage> peer)
+      throws IOException {
+    int numMessages = peer.getNumCurrentMessages();
+    boolean isConverge = false;
+    if (numMessages == 0) { // all grooms have already converged
+      return;
+    }
+
+    double avgTrainingError = 0;
+    DoubleMatrix[] matricesUpdates = null;
+    DoubleMatrix[] prevMatricesUpdates = null;
+
+    while (peer.getNumCurrentMessages() > 0) {
+      SmallLayeredNeuralNetworkMessage message = peer.getCurrentMessage();
+      if (matricesUpdates == null) {
+        matricesUpdates = message.getCurMatrices();
+        prevMatricesUpdates = message.getPrevMatrices();
+      } else {
+        SmallLayeredNeuralNetwork.matricesAdd(matricesUpdates,
+            message.getCurMatrices());
+        SmallLayeredNeuralNetwork.matricesAdd(prevMatricesUpdates,
+            message.getPrevMatrices());
+      }
+      avgTrainingError += message.getTrainingError();
+    }
+
+    if (numMessages != 1) {
+      avgTrainingError /= numMessages;
+      for (int i = 0; i < matricesUpdates.length; ++i) {
+        matricesUpdates[i] = matricesUpdates[i].divide(numMessages);
+        prevMatricesUpdates[i] = prevMatricesUpdates[i].divide(numMessages);
+      }
+    }
+    this.inMemoryModel.updateWeightMatrices(matricesUpdates);
+    this.inMemoryModel.setPrevWeightMatrices(prevMatricesUpdates);
+
+    // check convergence
+    if (iterations % convergenceCheckInterval == 0) {
+      if (prevAvgTrainingError < curAvgTrainingError) {
+        // error cannot decrease any more
+        isConverge = true;
+      }
+      // update
+      prevAvgTrainingError = curAvgTrainingError;
+      curAvgTrainingError = 0;
+    }
+    curAvgTrainingError += avgTrainingError / convergenceCheckInterval;
+
+    // broadcast updated weight matrices
+    for (String peerName : peer.getAllPeerNames()) {
+      SmallLayeredNeuralNetworkMessage msg = new SmallLayeredNeuralNetworkMessage(
+          0, isConverge, this.inMemoryModel.getWeightMatrices(),
+          this.inMemoryModel.getPrevMatricesUpdates());
+      peer.send(peerName, msg);
+    }
+  }
+
+}
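
The trainer reads all of its parameters from the job configuration in setup(). A minimal sketch of a driver setting them, using the same keys and defaults as above (passing the configuration on to the BSP job is assumed):

    HamaConfiguration conf = new HamaConfiguration();
    conf.set("modelPath", "/tmp/model");               // where the model is persisted
    conf.setLong("training.max.iterations", 100000L);  // hard cap on iterations
    conf.setLong("convergence.check.interval", 2000L); // iterations between checks
    conf.setInt("training.batch.size", 50);            // records read per superstep
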
diff --git a/src/main/java/org/apache/horn/examples/NeuralNetwork.java b/src/main/java/org/apache/horn/examples/NeuralNetwork.java
new file mode 100644
index 0000000..249f0e8
--- /dev/null
+++ b/src/main/java/org/apache/horn/examples/NeuralNetwork.java
@@ -0,0 +1,216 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.examples;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hama.HamaConfiguration;
+import org.apache.hama.commons.math.DenseDoubleVector;
+import org.apache.hama.commons.math.DoubleVector;
+import org.apache.hama.commons.math.FunctionFactory;
+import org.apache.hama.ml.ann.SmallLayeredNeuralNetwork;
+
+/**
+ * The example of using {@link SmallLayeredNeuralNetwork}, including the
+ * training phase and labeling phase.
+ */
+public class NeuralNetwork {
+
+  public static void main(String[] args) throws Exception {
+    if (args.length < 3) {
+      printUsage();
+      return;
+    }
+    String mode = args[0];
+    if (mode.equalsIgnoreCase("label")) {
+      if (args.length < 4) {
+        printUsage();
+        return;
+      }
+      HamaConfiguration conf = new HamaConfiguration();
+
+      String featureDataPath = args[1];
+      String resultDataPath = args[2];
+      String modelPath = args[3];
+
+      SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork(modelPath);
+
+      // process the data in a streaming fashion
+      FileSystem fs = FileSystem.get(new URI(featureDataPath), conf);
+      BufferedReader br = new BufferedReader(new InputStreamReader(
+          fs.open(new Path(featureDataPath))));
+      Path outputPath = new Path(resultDataPath);
+      if (fs.exists(outputPath)) {
+        fs.delete(outputPath, true);
+      }
+      BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
+          fs.create(outputPath)));
+
+      String line = null;
+
+      while ((line = br.readLine()) != null) {
+        if (line.trim().length() == 0) {
+          continue;
+        }
+        String[] tokens = line.trim().split(",");
+        double[] vals = new double[tokens.length];
+        for (int i = 0; i < tokens.length; ++i) {
+          vals[i] = Double.parseDouble(tokens[i]);
+        }
+        DoubleVector instance = new DenseDoubleVector(vals);
+        DoubleVector result = ann.getOutput(instance);
+        double[] arrResult = result.toArray();
+        StringBuilder sb = new StringBuilder();
+        for (int i = 0; i < arrResult.length; ++i) {
+          sb.append(arrResult[i]);
+          if (i != arrResult.length - 1) {
+            sb.append(",");
+          } else {
+            sb.append("\n");
+          }
+        }
+        bw.write(sb.toString());
+      }
+
+      br.close();
+      bw.close();
+    } else if (mode.equals("train")) {
+      if (args.length < 5) {
+        printUsage();
+        return;
+      }
+
+      String trainingDataPath = args[1];
+      String trainedModelPath = args[2];
+
+      int featureDimension = Integer.parseInt(args[3]);
+      int labelDimension = Integer.parseInt(args[4]);
+
+      int iteration = 1000;
+      double learningRate = 0.4;
+      double momemtumWeight = 0.2;
+      double regularizationWeight = 0.01;
+
+      // parse parameters
+      if (args.length >= 6) {
+        try {
+          iteration = Integer.parseInt(args[5]);
+          System.out.printf("Iteration: %d\n", iteration);
+        } catch (NumberFormatException e) {
+          System.err
+              .println("MAX_ITERATION format invalid. It should be a positive number.");
+          return;
+        }
+      }
+      if (args.length >= 7) {
+        try {
+          learningRate = Double.parseDouble(args[6]);
+          System.out.printf("Learning rate: %f\n", learningRate);
+        } catch (NumberFormatException e) {
+          System.err
+              .println("LEARNING_RATE format invalid. It should be a positive double in range (0, 1.0)");
+          return;
+        }
+      }
+      if (args.length >= 8) {
+        try {
+          momemtumWeight = Double.parseDouble(args[7]);
+          System.out.printf("Momemtum weight: %f\n", momemtumWeight);
+        } catch (NumberFormatException e) {
+          System.err
+              .println("MOMEMTUM_WEIGHT format invalid. It should be a positive double in range (0, 1.0)");
+          return;
+        }
+      }
+      if (args.length >= 9) {
+        try {
+          regularizationWeight = Double.parseDouble(args[8]);
+          System.out
+              .printf("Regularization weight: %f\n", regularizationWeight);
+        } catch (NumberFormatException e) {
+          System.err
+              .println("REGULARIZATION_WEIGHT format invalid. It should be a positive double in range (0, 1.0)");
+          return;
+        }
+      }
+
+      // train the model
+      SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+      ann.setLearningRate(learningRate);
+      ann.setMomemtumWeight(momemtumWeight);
+      ann.setRegularizationWeight(regularizationWeight);
+      ann.addLayer(featureDimension, false,
+          FunctionFactory.createDoubleFunction("Sigmoid"));
+      ann.addLayer(featureDimension, false,
+          FunctionFactory.createDoubleFunction("Sigmoid"));
+      ann.addLayer(labelDimension, true,
+          FunctionFactory.createDoubleFunction("Sigmoid"));
+      ann.setCostFunction(FunctionFactory
+          .createDoubleDoubleFunction("CrossEntropy"));
+      ann.setModelPath(trainedModelPath);
+
+      Map<String, String> trainingParameters = new HashMap<String, String>();
+      trainingParameters.put("tasks", "5");
+      trainingParameters.put("training.max.iterations", "" + iteration);
+      trainingParameters.put("training.batch.size", "300");
+      trainingParameters.put("convergence.check.interval", "1000");
+      ann.train(new Path(trainingDataPath), trainingParameters);
+    }
+
+  }
+
+  private static void printUsage() {
+    System.out
+        .println("USAGE: <MODE> <INPUT_PATH> <OUTPUT_PATH> <MODEL_PATH>|<FEATURE_DIMENSION> <LABEL_DIMENSION> [<MAX_ITERATION> <LEARNING_RATE> <MOMEMTUM_WEIGHT> <REGULARIZATION_WEIGHT>]");
+    System.out
+        .println("\tMODE\t- train: train the model with given training data.");
+    System.out
+        .println("\t\t- label: obtain the result by feeding the features to the neural network.");
+    System.out
+        .println("\tINPUT_PATH\tin 'train' mode, it is the path of the training data; in 'label' mode, it is the path of the to be evaluated data that lacks the label.");
+    System.out
+        .println("\tOUTPUT_PATH\tin 'train' mode, it is where the trained model is stored; in 'label' mode, it is where the labeled data is stored.");
+    System.out.println("\n\tConditional Parameters:");
+    System.out
+        .println("\tMODEL_PATH\tonly required in 'label' mode. It specifies where to load the trained neural network model.");
+    System.out
+        .println("\tMAX_ITERATION\tonly used in 'train' mode. It specifies how many iterations the neural network runs. Default is 1000.");
+    System.out
+        .println("\tLEARNING_RATE\tonly used in 'train' mode. It specifies the step size of learning, usually in range (0, 1.0). Default is 0.4.");
+    System.out
+        .println("\tMOMEMTUM_WEIGHT\tonly used in 'train' mode. It specifies the weight of momentum, usually in range (0, 1.0). Default is 0.2.");
+    System.out
+        .println("\tREGULARIZATION_WEIGHT\tonly used in 'train' mode. It specifies the weight of regularization. Default is 0.01.");
+    System.out.println("\nExample:");
+    System.out
+        .println("Train a neural network with feature dimension 8, label dimension 1 and default settings:\n\tneuralnets train hdfs://localhost:30002/training_data hdfs://localhost:30002/model 8 1");
+    System.out
+        .println("Train a neural network with feature dimension 8, label dimension 1, max iteration 1000, learning rate 0.1, momentum weight 0.2, and regularization weight 0.01:\n\tneuralnets train hdfs://localhost:30002/training_data hdfs://localhost:30002/model 8 1 1000 0.1 0.2 0.01");
+    System.out
+        .println("Label the data with a trained model:\n\tneuralnets label hdfs://localhost:30002/unlabeled_data hdfs://localhost:30002/result hdfs://localhost:30002/model");
+  }
+
+}
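
In 'label' mode the example consumes one comma-separated feature vector per line. A minimal sketch of writing such an input file, assuming the HDFS URI and an 8-dimensional feature space as in the usage examples:

    FileSystem fs = FileSystem.get(new URI("hdfs://localhost:30002/"),
        new HamaConfiguration());
    BufferedWriter w = new BufferedWriter(new OutputStreamWriter(
        fs.create(new Path("hdfs://localhost:30002/unlabeled_data"))));
    w.write("0.2,0.5,0.1,0.9,0.3,0.7,0.4,0.6\n"); // one unlabeled instance
    w.close();
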
diff --git a/src/test/java/org/apache/horn/bsp/MLTestBase.java b/src/test/java/org/apache/horn/bsp/MLTestBase.java
new file mode 100644
index 0000000..8001bcf
--- /dev/null
+++ b/src/test/java/org/apache/horn/bsp/MLTestBase.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.bsp;
+
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * The common methods for testing machine learning algorithms
+ *
+ */
+public abstract class MLTestBase {
+
+  /**
+   * Conduct the 0-1 normalization on each feature column, in place.
+   * 
+   * @param instanceList the list of instances to normalize.
+   * @param len the number of leading features to normalize.
+   */
+  protected static void zeroOneNormalization(List<double[]> instanceList,
+      int len) {
+    double[] mins = new double[len];
+    double[] maxs = new double[len];
+    Arrays.fill(mins, Double.MAX_VALUE);
+    // Double.MIN_VALUE is the smallest positive double, so use the most
+    // negative value as the initial maximum instead
+    Arrays.fill(maxs, -Double.MAX_VALUE);
+
+    for (double[] instance : instanceList) {
+      for (int i = 0; i < len; ++i) {
+        if (mins[i] > instance[i]) {
+          mins[i] = instance[i];
+        }
+        if (maxs[i] < instance[i]) {
+          maxs[i] = instance[i];
+        }
+      }
+    }
+
+    for (double[] instance : instanceList) {
+      for (int i = 0; i < len; ++i) {
+        double range = maxs[i] - mins[i];
+        if (range != 0) {
+          instance[i] = (instance[i] - mins[i]) / range;
+        }
+      }
+    }
+
+  }
+}
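
A minimal sketch of the helper in use; since zeroOneNormalization is protected, it is assumed to run inside a class extending MLTestBase:

    List<double[]> data = new ArrayList<double[]>();
    data.add(new double[] { 1.0, 10.0 });
    data.add(new double[] { 2.0, 20.0 });
    data.add(new double[] { 3.0, 30.0 });
    zeroOneNormalization(data, 2);
    // each column is rescaled to [0, 1]: the first instance becomes
    // {0.0, 0.0} and the last {1.0, 1.0}
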
diff --git a/src/test/java/org/apache/horn/bsp/TestAutoEncoder.java b/src/test/java/org/apache/horn/bsp/TestAutoEncoder.java
new file mode 100644
index 0000000..0aaa926
--- /dev/null
+++ b/src/test/java/org/apache/horn/bsp/TestAutoEncoder.java
@@ -0,0 +1,194 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.bsp;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hama.commons.io.VectorWritable;
+import org.apache.hama.commons.math.DenseDoubleVector;
+import org.apache.hama.commons.math.DoubleVector;
+import org.junit.Test;
+import org.mortbay.log.Log;
+
+/**
+ * Test the functionality of {@link AutoEncoder}.
+ * 
+ */
+public class TestAutoEncoder extends MLTestBase {
+
+  @Test
+  public void testAutoEncoderSimple() {
+    double[][] instances = { { 0, 0, 0, 1 }, { 0, 0, 1, 0 }, { 0, 1, 0, 0 },
+        { 0, 0, 0, 0 } };
+    AutoEncoder encoder = new AutoEncoder(4, 2);
+    encoder.setLearningRate(0.5);
+    encoder.setMomemtumWeight(0.2);
+    
+    int maxIteration = 2000;
+    Random rnd = new Random();
+    for (int iteration = 0; iteration < maxIteration; ++iteration) {
+      for (int i = 0; i < instances.length; ++i) {
+        encoder.trainOnline(new DenseDoubleVector(instances[rnd.nextInt(instances.length)]));
+      }
+    }
+
+    for (int i = 0; i < instances.length; ++i) {
+      DoubleVector encodeVec = encoder.encode(new DenseDoubleVector(
+          instances[i]));
+      DoubleVector decodeVec = encoder.decode(encodeVec);
+      for (int d = 0; d < instances[i].length; ++d) {
+        assertEquals(instances[i][d], decodeVec.get(d), 0.1);
+      }
+    }
+
+  }
+  
+  @Test
+  public void testAutoEncoderSwissRollDataset() {
+    List<double[]> instanceList = new ArrayList<double[]>();
+    try {
+      BufferedReader br = new BufferedReader(new FileReader("src/test/resources/dimensional_reduction.txt"));
+      String line = null;
+      while ((line = br.readLine()) != null) {
+        String[] tokens = line.split("\t");
+        double[] instance = new double[tokens.length];
+        for (int i = 0; i < instance.length; ++i) {
+          instance[i] = Double.parseDouble(tokens[i]);
+        }
+        instanceList.add(instance);
+      }
+      br.close();
+      // normalize instances
+      zeroOneNormalization(instanceList, instanceList.get(0).length);
+    } catch (FileNotFoundException e) {
+      e.printStackTrace();
+    } catch (NumberFormatException e) {
+      e.printStackTrace();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    
+    List<DoubleVector> vecInstanceList = new ArrayList<DoubleVector>();
+    for (double[] instance : instanceList) {
+      vecInstanceList.add(new DenseDoubleVector(instance));
+    }
+    AutoEncoder encoder = new AutoEncoder(3, 2);
+    encoder.setLearningRate(0.05);
+    encoder.setMomemtumWeight(0.1);
+    int maxIteration = 2000;
+    for (int iteration = 0; iteration < maxIteration; ++iteration) {
+      for (DoubleVector vector : vecInstanceList) {
+        encoder.trainOnline(vector);
+      }
+    }
+
+    double errorInstance = 0;
+    for (DoubleVector vector : vecInstanceList) {
+      DoubleVector decoded = encoder.getOutput(vector);
+      DoubleVector diff = vector.subtract(decoded);
+      double error = diff.dot(diff);
+      if (error > 0.1) {
+        ++errorInstance;
+      }
+    }
+    Log.info(String.format("Autoecoder error rate: %f%%\n", errorInstance * 100 / vecInstanceList.size()));
+    
+  }
+  
+  @Test
+  public void testAutoEncoderSwissRollDatasetDistributed() {
+    String strDataPath = "/tmp/dimensional_reduction.txt";
+    Path path = new Path(strDataPath);
+    List<double[]> instanceList = new ArrayList<double[]>();
+    try {
+      Configuration conf = new Configuration();
+      FileSystem fs = FileSystem.get(new URI(strDataPath), conf);
+      if (fs.exists(path)) {
+        fs.delete(path, true);
+      }
+      
+      String line = null;
+      BufferedReader br = new BufferedReader(new FileReader("src/test/resources/dimensional_reduction.txt"));
+      while ((line = br.readLine()) != null) {
+        String[] tokens = line.split("\t");
+        double[] instance = new double[tokens.length];
+        for (int i = 0; i < instance.length; ++i) {
+          instance[i] = Double.parseDouble(tokens[i]);
+        }
+        instanceList.add(instance);
+      }
+      br.close();
+      // normalize instances
+      zeroOneNormalization(instanceList, instanceList.get(0).length);
+      
+      SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf, path, LongWritable.class, VectorWritable.class);
+      for (int i = 0; i < instanceList.size(); ++i) {
+        DoubleVector vector = new DenseDoubleVector(instanceList.get(i));
+        writer.append(new LongWritable(i), new VectorWritable(vector));
+      }
+      
+      writer.close();
+    } catch (FileNotFoundException e) {
+      e.printStackTrace();
+    } catch (IOException e) {
+      e.printStackTrace();
+    } catch (URISyntaxException e) {
+      e.printStackTrace();
+    }
+    
+    AutoEncoder encoder = new AutoEncoder(3, 2);
+    String modelPath = "/tmp/autoencoder-modelpath";
+    encoder.setModelPath(modelPath);
+    Map<String, String> trainingParams = new HashMap<String, String>();
+    encoder.setLearningRate(0.5);
+    trainingParams.put("tasks", "5");
+    trainingParams.put("training.max.iterations", "3000");
+    trainingParams.put("training.batch.size", "200");
+    encoder.train(path, trainingParams);
+    
+    double errorInstance = 0;
+    for (double[] instance : instanceList) {
+      DoubleVector vector = new DenseDoubleVector(instance);
+      DoubleVector decoded = encoder.getOutput(vector);
+      DoubleVector diff = vector.subtract(decoded);
+      double error = diff.dot(diff);
+      if (error > 0.1) {
+        ++errorInstance;
+      }
+    }
+    Log.info(String.format("Autoecoder error rate: %f%%\n", errorInstance * 100 / instanceList.size()));
+  }
+
+}
diff --git a/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetwork.java b/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetwork.java
new file mode 100644
index 0000000..85c4b7a
--- /dev/null
+++ b/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetwork.java
@@ -0,0 +1,642 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.bsp;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hama.commons.io.VectorWritable;
+import org.apache.hama.commons.math.DenseDoubleMatrix;
+import org.apache.hama.commons.math.DenseDoubleVector;
+import org.apache.hama.commons.math.DoubleMatrix;
+import org.apache.hama.commons.math.DoubleVector;
+import org.apache.hama.commons.math.FunctionFactory;
+import org.apache.horn.bsp.AbstractLayeredNeuralNetwork.LearningStyle;
+import org.apache.horn.bsp.AbstractLayeredNeuralNetwork.TrainingMethod;
+import org.apache.hama.ml.util.DefaultFeatureTransformer;
+import org.apache.hama.ml.util.FeatureTransformer;
+import org.junit.Test;
+import org.mortbay.log.Log;
+
+/**
+ * Test the functionality of SmallLayeredNeuralNetwork.
+ * 
+ */
+public class TestSmallLayeredNeuralNetwork extends MLTestBase {
+
+  @Test
+  public void testReadWrite() {
+    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+    ann.addLayer(2, false,
+        FunctionFactory.createDoubleFunction("IdentityFunction"));
+    ann.addLayer(5, false,
+        FunctionFactory.createDoubleFunction("IdentityFunction"));
+    ann.addLayer(1, true,
+        FunctionFactory.createDoubleFunction("IdentityFunction"));
+    ann.setCostFunction(FunctionFactory
+        .createDoubleDoubleFunction("SquaredError"));
+    double learningRate = 0.2;
+    ann.setLearningRate(learningRate);
+    double momentumWeight = 0.5;
+    ann.setMomemtumWeight(momentumWeight);
+    double regularizationWeight = 0.05;
+    ann.setRegularizationWeight(regularizationWeight);
+    // intentionally initialize the weight matrices with constant values
+    DoubleMatrix[] matrices = new DenseDoubleMatrix[2];
+    matrices[0] = new DenseDoubleMatrix(5, 3, 0.2);
+    matrices[1] = new DenseDoubleMatrix(1, 6, 0.8);
+    ann.setWeightMatrices(matrices);
+    ann.setLearningStyle(LearningStyle.UNSUPERVISED);
+    
+    FeatureTransformer defaultFeatureTransformer = new DefaultFeatureTransformer();
+    ann.setFeatureTransformer(defaultFeatureTransformer);
+    
+
+    // write to file
+    String modelPath = "/tmp/testSmallLayeredNeuralNetworkReadWrite";
+    ann.setModelPath(modelPath);
+    try {
+      ann.writeModelToFile();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+
+    // read from file
+    SmallLayeredNeuralNetwork annCopy = new SmallLayeredNeuralNetwork(modelPath);
+    assertEquals(annCopy.getClass().getSimpleName(), annCopy.getModelType());
+    assertEquals(modelPath, annCopy.getModelPath());
+    assertEquals(learningRate, annCopy.getLearningRate(), 0.000001);
+    assertEquals(momentumWeight, annCopy.getMomemtumWeight(), 0.000001);
+    assertEquals(regularizationWeight, annCopy.getRegularizationWeight(),
+        0.000001);
+    assertEquals(TrainingMethod.GRADIENT_DESCENT, annCopy.getTrainingMethod());
+    assertEquals(LearningStyle.UNSUPERVISED, annCopy.getLearningStyle());
+
+    // compare weights
+    DoubleMatrix[] weightsMatrices = annCopy.getWeightMatrices();
+    for (int i = 0; i < weightsMatrices.length; ++i) {
+      DoubleMatrix expectMat = matrices[i];
+      DoubleMatrix actualMat = weightsMatrices[i];
+      for (int j = 0; j < expectMat.getRowCount(); ++j) {
+        for (int k = 0; k < expectMat.getColumnCount(); ++k) {
+          assertEquals(expectMat.get(j, k), actualMat.get(j, k), 0.000001);
+        }
+      }
+    }
+    
+    FeatureTransformer copyTransformer = annCopy.getFeatureTransformer();
+    assertEquals(defaultFeatureTransformer.getClass().getName(), copyTransformer.getClass().getName());
+  }
+
+  /**
+   * Test the forward functionality.
+   */
+  @Test
+  public void testOutput() {
+    // first network
+    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+    ann.addLayer(2, false,
+        FunctionFactory.createDoubleFunction("IdentityFunction"));
+    ann.addLayer(5, false,
+        FunctionFactory.createDoubleFunction("IdentityFunction"));
+    ann.addLayer(1, true,
+        FunctionFactory.createDoubleFunction("IdentityFunction"));
+    ann.setCostFunction(FunctionFactory
+        .createDoubleDoubleFunction("SquaredError"));
+    ann.setLearningRate(0.1);
+    // intentionally initialize all weights to 0.5
+    DoubleMatrix[] matrices = new DenseDoubleMatrix[2];
+    matrices[0] = new DenseDoubleMatrix(5, 3, 0.5);
+    matrices[1] = new DenseDoubleMatrix(1, 6, 0.5);
+    ann.setWeightMatrices(matrices);
+
+    double[] arr = new double[] { 0, 1 };
+    DoubleVector training = new DenseDoubleVector(arr);
+    DoubleVector result = ann.getOutput(training);
+    assertEquals(1, result.getDimension());
+    // assertEquals(3, result.get(0), 0.000001);
+
+    // second network
+    SmallLayeredNeuralNetwork ann2 = new SmallLayeredNeuralNetwork();
+    ann2.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann2.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann2.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann2.setCostFunction(FunctionFactory
+        .createDoubleDoubleFunction("SquaredError"));
+    ann2.setLearningRate(0.3);
+    // intentionally initialize all weights to 0.5
+    DoubleMatrix[] matrices2 = new DenseDoubleMatrix[2];
+    matrices2[0] = new DenseDoubleMatrix(3, 3, 0.5);
+    matrices2[1] = new DenseDoubleMatrix(1, 4, 0.5);
+    ann2.setWeightMatrices(matrices2);
+
+    double[] test = { 0, 0 };
+    double[] result2 = { 0.807476 };
+
+    DoubleVector vec = ann2.getOutput(new DenseDoubleVector(test));
+    assertArrayEquals(result2, vec.toArray(), 0.000001);
+
+    SmallLayeredNeuralNetwork ann3 = new SmallLayeredNeuralNetwork();
+    ann3.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann3.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann3.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann3.setCostFunction(FunctionFactory
+        .createDoubleDoubleFunction("SquaredError"));
+    ann3.setLearningRate(0.3);
+    // intentionally initialize all weights to 0.5
+    DoubleMatrix[] initMatrices = new DenseDoubleMatrix[2];
+    initMatrices[0] = new DenseDoubleMatrix(3, 3, 0.5);
+    initMatrices[1] = new DenseDoubleMatrix(1, 4, 0.5);
+    ann3.setWeightMatrices(initMatrices);
+
+    double[] instance = { 0, 1 };
+    DoubleVector output = ann3.getOutput(new DenseDoubleVector(instance));
+    assertEquals(0.8315410, output.get(0), 0.000001);
+  }
+
+  @Test
+  public void testXORlocal() {
+    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+    ann.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.setCostFunction(FunctionFactory
+        .createDoubleDoubleFunction("SquaredError"));
+    ann.setLearningRate(0.5);
+    ann.setMomemtumWeight(0.0);
+
+    int iterations = 50000; // iteration should be set to a very large number
+    double[][] instances = { { 0, 1, 1 }, { 0, 0, 0 }, { 1, 0, 1 }, { 1, 1, 0 } };
+    for (int i = 0; i < iterations; ++i) {
+      DoubleMatrix[] matrices = null;
+      for (int j = 0; j < instances.length; ++j) {
+        matrices = ann.trainByInstance(new DenseDoubleVector(instances[j
+            % instances.length]));
+        ann.updateWeightMatrices(matrices);
+      }
+    }
+
+    for (int i = 0; i < instances.length; ++i) {
+      DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
+      // the expected output is the last element in array
+      double result = instances[i][2];
+      double actual = ann.getOutput(input).get(0);
+      if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
+        Log.info("Neural network failes to lear the XOR.");
+      }
+    }
+
+    // write model into file and read out
+    String modelPath = "/tmp/testSmallLayeredNeuralNetworkXORLocal";
+    ann.setModelPath(modelPath);
+    try {
+      ann.writeModelToFile();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    SmallLayeredNeuralNetwork annCopy = new SmallLayeredNeuralNetwork(modelPath);
+    // test on instances
+    for (int i = 0; i < instances.length; ++i) {
+      DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
+      // the expected output is the last element in array
+      double result = instances[i][2];
+      double actual = annCopy.getOutput(input).get(0);
+      if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
+        Log.info("Neural network failes to lear the XOR.");
+      }
+    }
+  }
+
+  @Test
+  public void testXORWithMomentum() {
+    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+    ann.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.setCostFunction(FunctionFactory
+        .createDoubleDoubleFunction("SquaredError"));
+    ann.setLearningRate(0.6);
+    ann.setMomemtumWeight(0.3);
+
+    int iterations = 2000; // iteration should be set to a very large number
+    double[][] instances = { { 0, 1, 1 }, { 0, 0, 0 }, { 1, 0, 1 }, { 1, 1, 0 } };
+    for (int i = 0; i < iterations; ++i) {
+      for (int j = 0; j < instances.length; ++j) {
+        ann.trainOnline(new DenseDoubleVector(instances[j % instances.length]));
+      }
+    }
+
+    for (int i = 0; i < instances.length; ++i) {
+      DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
+      // the expected output is the last element in array
+      double result = instances[i][2];
+      double actual = ann.getOutput(input).get(0);
+      if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
+        Log.info("Neural network failes to lear the XOR.");
+      }
+    }
+
+    // write model into file and read out
+    String modelPath = "/tmp/testSmallLayeredNeuralNetworkXORLocalWithMomentum";
+    ann.setModelPath(modelPath);
+    try {
+      ann.writeModelToFile();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    SmallLayeredNeuralNetwork annCopy = new SmallLayeredNeuralNetwork(modelPath);
+    // test on instances
+    for (int i = 0; i < instances.length; ++i) {
+      DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
+      // the expected output is the last element in array
+      double result = instances[i][2];
+      double actual = annCopy.getOutput(input).get(0);
+      if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
+        Log.info("Neural network failes to lear the XOR.");
+      }
+    }
+  }
+
+  @Test
+  public void testXORLocalWithRegularization() {
+    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+    ann.addLayer(2, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(3, false, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.setCostFunction(FunctionFactory
+        .createDoubleDoubleFunction("SquaredError"));
+    ann.setLearningRate(0.7);
+    ann.setMomemtumWeight(0.5);
+    ann.setRegularizationWeight(0.002);
+
+    int iterations = 5000; // iteration should be set to a very large number
+    double[][] instances = { { 0, 1, 1 }, { 0, 0, 0 }, { 1, 0, 1 }, { 1, 1, 0 } };
+    for (int i = 0; i < iterations; ++i) {
+      for (int j = 0; j < instances.length; ++j) {
+        ann.trainOnline(new DenseDoubleVector(instances[j % instances.length]));
+      }
+    }
+
+    for (int i = 0; i < instances.length; ++i) {
+      DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
+      // the expected output is the last element in array
+      double result = instances[i][2];
+      double actual = ann.getOutput(input).get(0);
+      if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
+        Log.info("Neural network failes to lear the XOR.");
+      }
+    }
+
+    // write model into file and read out
+    String modelPath = "/tmp/testSmallLayeredNeuralNetworkXORLocalWithRegularization";
+    ann.setModelPath(modelPath);
+    try {
+      ann.writeModelToFile();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    SmallLayeredNeuralNetwork annCopy = new SmallLayeredNeuralNetwork(modelPath);
+    // test on instances
+    for (int i = 0; i < instances.length; ++i) {
+      DoubleVector input = new DenseDoubleVector(instances[i]).slice(2);
+      // the expected output is the last element in array
+      double result = instances[i][2];
+      double actual = annCopy.getOutput(input).get(0);
+      if (result < 0.5 && actual >= 0.5 || result >= 0.5 && actual < 0.5) {
+        Log.info("Neural network failes to lear the XOR.");
+      }
+    }
+  }
+
+  @Test
+  public void testTwoClassClassification() {
+    // use logistic regression data
+    String filepath = "src/test/resources/logistic_regression_data.txt";
+    List<double[]> instanceList = new ArrayList<double[]>();
+
+    try {
+      BufferedReader br = new BufferedReader(new FileReader(filepath));
+      String line = null;
+      while ((line = br.readLine()) != null) {
+        String[] tokens = line.trim().split(",");
+        double[] instance = new double[tokens.length];
+        for (int i = 0; i < tokens.length; ++i) {
+          instance[i] = Double.parseDouble(tokens[i]);
+        }
+        instanceList.add(instance);
+      }
+      br.close();
+    } catch (FileNotFoundException e) {
+      e.printStackTrace();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+
+    zeroOneNormalization(instanceList, instanceList.get(0).length - 1);
+    
+    int dimension = instanceList.get(0).length - 1;
+
+    // divide dataset into training and testing
+    List<double[]> testInstances = new ArrayList<double[]>();
+    testInstances.addAll(instanceList.subList(instanceList.size() - 100,
+        instanceList.size()));
+    List<double[]> trainingInstances = instanceList.subList(0,
+        instanceList.size() - 100);
+
+    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+    ann.setLearningRate(0.001);
+    ann.setMomemtumWeight(0.1);
+    ann.setRegularizationWeight(0.01);
+    ann.addLayer(dimension, false,
+        FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(dimension, false,
+        FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(dimension, false,
+        FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.setCostFunction(FunctionFactory
+        .createDoubleDoubleFunction("CrossEntropy"));
+
+    long start = new Date().getTime();
+    int iterations = 1000;
+    for (int i = 0; i < iterations; ++i) {
+      for (double[] trainingInstance : trainingInstances) {
+        ann.trainOnline(new DenseDoubleVector(trainingInstance));
+      }
+    }
+    long end = new Date().getTime();
+    Log.info(String.format("Training time: %fs\n",
+        (double) (end - start) / 1000));
+
+    double errorRate = 0;
+    // calculate the error on test instance
+    for (double[] testInstance : testInstances) {
+      DoubleVector instance = new DenseDoubleVector(testInstance);
+      double expected = instance.get(instance.getDimension() - 1);
+      instance = instance.slice(instance.getDimension() - 1);
+      double actual = ann.getOutput(instance).get(0);
+      if (actual < 0.5 && expected >= 0.5 || actual >= 0.5 && expected < 0.5) {
+        ++errorRate;
+      }
+    }
+    errorRate /= testInstances.size();
+
+    Log.info(String.format("Relative error: %f%%\n", errorRate * 100));
+  }
+  
+  @Test
+  public void testLogisticRegression() {
+    this.testLogisticRegressionDistributedVersion();
+    this.testLogisticRegressionDistributedVersionWithFeatureTransformer();
+  }
+
+  public void testLogisticRegressionDistributedVersion() {
+    // write data into a sequence file
+    String tmpStrDatasetPath = "/tmp/logistic_regression_data";
+    Path tmpDatasetPath = new Path(tmpStrDatasetPath);
+    String strDataPath = "src/test/resources/logistic_regression_data.txt";
+    String modelPath = "/tmp/logistic-regression-distributed-model";
+
+    Configuration conf = new Configuration();
+    List<double[]> instanceList = new ArrayList<double[]>();
+    List<double[]> trainingInstances = null;
+    List<double[]> testInstances = null;
+
+    try {
+      FileSystem fs = FileSystem.get(new URI(tmpStrDatasetPath), conf);
+      // remove any stale dataset left over from a previous run
+      fs.delete(tmpDatasetPath, true);
+
+      BufferedReader br = new BufferedReader(new FileReader(strDataPath));
+      String line = null;
+      int count = 0;
+      while ((line = br.readLine()) != null) {
+        String[] tokens = line.trim().split(",");
+        double[] instance = new double[tokens.length];
+        for (int i = 0; i < tokens.length; ++i) {
+          instance[i] = Double.parseDouble(tokens[i]);
+        }
+        instanceList.add(instance);
+      }
+      br.close();
+      
+      zeroOneNormalization(instanceList, instanceList.get(0).length - 1);
+      
+      // write training data to temporal sequence file
+      SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
+          tmpDatasetPath, LongWritable.class, VectorWritable.class);
+      int testSize = 150;
+
+      Collections.shuffle(instanceList);
+      testInstances = new ArrayList<double[]>();
+      testInstances.addAll(instanceList.subList(instanceList.size() - testSize,
+          instanceList.size()));
+      trainingInstances = instanceList.subList(0, instanceList.size()
+          - testSize);
+
+      for (double[] instance : trainingInstances) {
+        DoubleVector vec = new DenseDoubleVector(instance);
+        writer.append(new LongWritable(count++), new VectorWritable(vec));
+      }
+      writer.close();
+    } catch (FileNotFoundException e) {
+      e.printStackTrace();
+    } catch (IOException e) {
+      e.printStackTrace();
+    } catch (URISyntaxException e) {
+      e.printStackTrace();
+    }
+
+    // create model
+    int dimension = 8;
+    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+    ann.setLearningRate(0.7);
+    ann.setMomemtumWeight(0.5);
+    ann.setRegularizationWeight(0.1);
+    ann.addLayer(dimension, false,
+        FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(dimension, false,
+        FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(dimension, false,
+        FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.setCostFunction(FunctionFactory
+        .createDoubleDoubleFunction("CrossEntropy"));
+    ann.setModelPath(modelPath);
+
+    long start = new Date().getTime();
+    Map<String, String> trainingParameters = new HashMap<String, String>();
+    trainingParameters.put("tasks", "5");
+    trainingParameters.put("training.max.iterations", "2000");
+    trainingParameters.put("training.batch.size", "300");
+    trainingParameters.put("convergence.check.interval", "1000");
+    ann.train(tmpDatasetPath, trainingParameters);
+
+    long end = new Date().getTime();
+
+    // validate results
+    double errorRate = 0;
+    // calculate the error on test instance
+    for (double[] testInstance : testInstances) {
+      DoubleVector instance = new DenseDoubleVector(testInstance);
+      double expected = instance.get(instance.getDimension() - 1);
+      instance = instance.slice(instance.getDimension() - 1);
+      double actual = ann.getOutput(instance).get(0);
+      if (actual < 0.5 && expected >= 0.5 || actual >= 0.5 && expected < 0.5) {
+        ++errorRate;
+      }
+    }
+    errorRate /= testInstances.size();
+
+    Log.info(String.format("Training time: %fs\n",
+        (double) (end - start) / 1000));
+    Log.info(String.format("Relative error: %f%%\n", errorRate * 100));
+  }
+  
+  public void testLogisticRegressionDistributedVersionWithFeatureTransformer() {
+    // write data into a sequence file
+    String tmpStrDatasetPath = "/tmp/logistic_regression_data_feature_transformer";
+    Path tmpDatasetPath = new Path(tmpStrDatasetPath);
+    String strDataPath = "src/test/resources/logistic_regression_data.txt";
+    String modelPath = "/tmp/logistic-regression-distributed-model-feature-transformer";
+
+    Configuration conf = new Configuration();
+    List<double[]> instanceList = new ArrayList<double[]>();
+    List<double[]> trainingInstances = null;
+    List<double[]> testInstances = null;
+
+    try {
+      FileSystem fs = FileSystem.get(new URI(tmpStrDatasetPath), conf);
+      // remove any stale dataset left over from a previous run
+      fs.delete(tmpDatasetPath, true);
+
+      BufferedReader br = new BufferedReader(new FileReader(strDataPath));
+      String line = null;
+      int count = 0;
+      while ((line = br.readLine()) != null) {
+        String[] tokens = line.trim().split(",");
+        double[] instance = new double[tokens.length];
+        for (int i = 0; i < tokens.length; ++i) {
+          instance[i] = Double.parseDouble(tokens[i]);
+        }
+        instanceList.add(instance);
+      }
+      br.close();
+      
+      zeroOneNormalization(instanceList, instanceList.get(0).length - 1);
+      
+      // write training data to temporal sequence file
+      SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
+          tmpDatasetPath, LongWritable.class, VectorWritable.class);
+      int testSize = 150;
+
+      Collections.shuffle(instanceList);
+      testInstances = new ArrayList<double[]>();
+      testInstances.addAll(instanceList.subList(instanceList.size() - testSize,
+          instanceList.size()));
+      trainingInstances = instanceList.subList(0, instanceList.size()
+          - testSize);
+
+      for (double[] instance : trainingInstances) {
+        DoubleVector vec = new DenseDoubleVector(instance);
+        writer.append(new LongWritable(count++), new VectorWritable(vec));
+      }
+      writer.close();
+    } catch (FileNotFoundException e) {
+      e.printStackTrace();
+    } catch (IOException e) {
+      e.printStackTrace();
+    } catch (URISyntaxException e) {
+      e.printStackTrace();
+    }
+
+    // create model
+    int dimension = 8;
+    SmallLayeredNeuralNetwork ann = new SmallLayeredNeuralNetwork();
+    ann.setLearningRate(0.7);
+    ann.setMomemtumWeight(0.5);
+    ann.setRegularizationWeight(0.1);
+    ann.addLayer(dimension, false,
+        FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(dimension, false,
+        FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(dimension, false,
+        FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.addLayer(1, true, FunctionFactory.createDoubleFunction("Sigmoid"));
+    ann.setCostFunction(FunctionFactory
+        .createDoubleDoubleFunction("CrossEntropy"));
+    ann.setModelPath(modelPath);
+    
+    FeatureTransformer featureTransformer = new DefaultFeatureTransformer();
+    
+    ann.setFeatureTransformer(featureTransformer);
+
+    long start = new Date().getTime();
+    Map<String, String> trainingParameters = new HashMap<String, String>();
+    trainingParameters.put("tasks", "5");
+    trainingParameters.put("training.max.iterations", "2000");
+    trainingParameters.put("training.batch.size", "300");
+    trainingParameters.put("convergence.check.interval", "1000");
+    ann.train(tmpDatasetPath, trainingParameters);
+    
+
+    long end = new Date().getTime();
+
+    // validate results
+    double errorRate = 0;
+    // calculate the error on test instance
+    for (double[] testInstance : testInstances) {
+      DoubleVector instance = new DenseDoubleVector(testInstance);
+      double expected = instance.get(instance.getDimension() - 1);
+      instance = instance.slice(instance.getDimension() - 1);
+      instance = featureTransformer.transform(instance);
+      double actual = ann.getOutput(instance).get(0);
+      if (actual < 0.5 && expected >= 0.5 || actual >= 0.5 && expected < 0.5) {
+        ++errorRate;
+      }
+    }
+    errorRate /= testInstances.size();
+
+    Log.info(String.format("Training time: %fs\n",
+        (double) (end - start) / 1000));
+    Log.info(String.format("Relative error: %f%%\n", errorRate * 100));
+  }
+
+}
diff --git a/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetworkMessage.java b/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetworkMessage.java
new file mode 100644
index 0000000..e422d95
--- /dev/null
+++ b/src/test/java/org/apache/horn/bsp/TestSmallLayeredNeuralNetworkMessage.java
@@ -0,0 +1,172 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.bsp;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hama.commons.math.DenseDoubleMatrix;
+import org.apache.hama.commons.math.DoubleMatrix;
+import org.junit.Test;
+
+/**
+ * Test the read/write functionality of SmallLayeredNeuralNetworkMessage.
+ * 
+ */
+public class TestSmallLayeredNeuralNetworkMessage {
+
+  @Test
+  public void testReadWriteWithoutPrev() {
+    double error = 0.22;
+    double[][] matrix1 = new double[][] { { 0.1, 0.2, 0.8, 0.5 },
+        { 0.3, 0.4, 0.6, 0.2 }, { 0.5, 0.6, 0.1, 0.5 } };
+    double[][] matrix2 = new double[][] { { 0.8, 1.2, 0.5 } };
+    DoubleMatrix[] matrices = new DoubleMatrix[2];
+    matrices[0] = new DenseDoubleMatrix(matrix1);
+    matrices[1] = new DenseDoubleMatrix(matrix2);
+
+    boolean isConverge = false;
+
+    SmallLayeredNeuralNetworkMessage message = new SmallLayeredNeuralNetworkMessage(
+        error, isConverge, matrices, null);
+    Configuration conf = new Configuration();
+    String strPath = "/tmp/testReadWriteSmallLayeredNeuralNetworkMessage";
+    Path path = new Path(strPath);
+    try {
+      FileSystem fs = FileSystem.get(new URI(strPath), conf);
+      FSDataOutputStream out = fs.create(path);
+      message.write(out);
+      out.close();
+
+      FSDataInputStream in = fs.open(path);
+      SmallLayeredNeuralNetworkMessage readMessage = new SmallLayeredNeuralNetworkMessage(
+          0, isConverge, null, null);
+      readMessage.readFields(in);
+      in.close();
+      assertEquals(error, readMessage.getTrainingError(), 0.000001);
+      assertFalse(readMessage.isConverge());
+      DoubleMatrix[] readMatrices = readMessage.getCurMatrices();
+      assertEquals(2, readMatrices.length);
+      for (int i = 0; i < readMatrices.length; ++i) {
+        double[][] doubleMatrices = ((DenseDoubleMatrix) readMatrices[i])
+            .getValues();
+        double[][] doubleExpected = ((DenseDoubleMatrix) matrices[i])
+            .getValues();
+        for (int r = 0; r < doubleMatrices.length; ++r) {
+          assertArrayEquals(doubleExpected[r], doubleMatrices[r], 0.000001);
+        }
+      }
+
+      DoubleMatrix[] readPrevMatrices = readMessage.getPrevMatrices();
+      assertNull(readPrevMatrices);
+
+      // delete
+      fs.delete(path, true);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    } catch (URISyntaxException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Test
+  public void testReadWriteWithPrev() {
+    double error = 0.22;
+    boolean isConverge = true;
+
+    double[][] matrix1 = new double[][] { { 0.1, 0.2, 0.8, 0.5 },
+        { 0.3, 0.4, 0.6, 0.2 }, { 0.5, 0.6, 0.1, 0.5 } };
+    double[][] matrix2 = new double[][] { { 0.8, 1.2, 0.5 } };
+    DoubleMatrix[] matrices = new DoubleMatrix[2];
+    matrices[0] = new DenseDoubleMatrix(matrix1);
+    matrices[1] = new DenseDoubleMatrix(matrix2);
+
+    double[][] prevMatrix1 = new double[][] { { 0.1, 0.1, 0.2, 0.3 },
+        { 0.2, 0.4, 0.1, 0.5 }, { 0.5, 0.1, 0.5, 0.2 } };
+    double[][] prevMatrix2 = new double[][] { { 0.1, 0.2, 0.5, 0.9 },
+        { 0.3, 0.5, 0.2, 0.6 }, { 0.6, 0.8, 0.7, 0.5 } };
+
+    DoubleMatrix[] prevMatrices = new DoubleMatrix[2];
+    prevMatrices[0] = new DenseDoubleMatrix(prevMatrix1);
+    prevMatrices[1] = new DenseDoubleMatrix(prevMatrix2);
+
+    SmallLayeredNeuralNetworkMessage message = new SmallLayeredNeuralNetworkMessage(
+        error, isConverge, matrices, prevMatrices);
+    Configuration conf = new Configuration();
+    String strPath = "/tmp/testReadWriteSmallLayeredNeuralNetworkMessageWithPrev";
+    Path path = new Path(strPath);
+    try {
+      FileSystem fs = FileSystem.get(new URI(strPath), conf);
+      FSDataOutputStream out = fs.create(path);
+      message.write(out);
+      out.close();
+
+      FSDataInputStream in = fs.open(path);
+      SmallLayeredNeuralNetworkMessage readMessage = new SmallLayeredNeuralNetworkMessage(
+          0, isConverge, null, null);
+      readMessage.readFields(in);
+      in.close();
+
+      assertTrue(readMessage.isConverge());
+
+      DoubleMatrix[] readMatrices = readMessage.getCurMatrices();
+      assertEquals(2, readMatrices.length);
+      for (int i = 0; i < readMatrices.length; ++i) {
+        double[][] doubleMatrices = ((DenseDoubleMatrix) readMatrices[i])
+            .getValues();
+        double[][] doubleExpected = ((DenseDoubleMatrix) matrices[i])
+            .getValues();
+        for (int r = 0; r < doubleMatrices.length; ++r) {
+          assertArrayEquals(doubleExpected[r], doubleMatrices[r], 0.000001);
+        }
+      }
+
+      DoubleMatrix[] readPrevMatrices = readMessage.getPrevMatrices();
+      assertEquals(2, readPrevMatrices.length);
+      for (int i = 0; i < readPrevMatrices.length; ++i) {
+        double[][] doubleMatrices = ((DenseDoubleMatrix) readPrevMatrices[i])
+            .getValues();
+        double[][] doubleExpected = ((DenseDoubleMatrix) prevMatrices[i])
+            .getValues();
+        for (int r = 0; r < doubleMatrices.length; ++r) {
+          assertArrayEquals(doubleExpected[r], doubleMatrices[r], 0.000001);
+        }
+      }
+
+      // delete
+      fs.delete(path, true);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    } catch (URISyntaxException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+}
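
Both round-trip tests above serialize through a file under /tmp. The same Writable contract can be exercised entirely in memory with Hadoop's DataOutputBuffer and DataInputBuffer (both in org.apache.hadoop.io), which drops the filesystem dependency; a minimal sketch against the message API used above, meant to run inside a test method:

    // Serialize the message into an in-memory buffer instead of an HDFS file.
    DataOutputBuffer out = new DataOutputBuffer();
    message.write(out);

    // Point an input buffer at the serialized bytes and deserialize.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    SmallLayeredNeuralNetworkMessage read =
        new SmallLayeredNeuralNetworkMessage(0, false, null, null);
    read.readFields(in);
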
diff --git a/src/test/java/org/apache/horn/examples/NeuralNetworkTest.java b/src/test/java/org/apache/horn/examples/NeuralNetworkTest.java
new file mode 100644
index 0000000..462140c
--- /dev/null
+++ b/src/test/java/org/apache/horn/examples/NeuralNetworkTest.java
@@ -0,0 +1,140 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.horn.examples;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hama.HamaConfiguration;
+import org.apache.hama.commons.io.VectorWritable;
+import org.apache.hama.commons.math.DenseDoubleVector;
+
+/**
+ * Test the functionality of the NeuralNetwork example.
+ * 
+ */
+public class NeuralNetworkTest extends TestCase {
+  private Configuration conf = new HamaConfiguration();
+  private FileSystem fs;
+  private String MODEL_PATH = "/tmp/neuralnets.model";
+  private String RESULT_PATH = "/tmp/neuralnets.txt";
+  private String SEQTRAIN_DATA = "/tmp/test-neuralnets.data";
+  
+  @Override
+  protected void setUp() throws Exception {
+    super.setUp();
+    fs = FileSystem.get(conf);
+  }
+
+  public void testNeuralnetsLabeling() throws IOException {
+    this.neuralNetworkTraining();
+
+    String dataPath = "src/test/resources/neuralnets_classification_test.txt";
+    String mode = "label";
+    try {
+      NeuralNetwork
+          .main(new String[] { mode, dataPath, RESULT_PATH, MODEL_PATH });
+
+      // compare results with ground-truth
+      BufferedReader groundTruthReader = new BufferedReader(new FileReader(
+          "src/test/resources/neuralnets_classification_label.txt"));
+      List<Double> groundTruthList = new ArrayList<Double>();
+      String line = null;
+      while ((line = groundTruthReader.readLine()) != null) {
+        groundTruthList.add(Double.parseDouble(line));
+      }
+      groundTruthReader.close();
+
+      BufferedReader resultReader = new BufferedReader(new FileReader(
+          RESULT_PATH));
+      List<Double> resultList = new ArrayList<Double>();
+      while ((line = resultReader.readLine()) != null) {
+        resultList.add(Double.parseDouble(line));
+      }
+      resultReader.close();
+      int total = resultList.size();
+      double correct = 0;
+      for (int i = 0; i < groundTruthList.size(); ++i) {
+        double actual = resultList.get(i);
+        double expected = groundTruthList.get(i);
+        if ((actual < 0.5 && expected < 0.5) || (actual >= 0.5 && expected >= 0.5)) {
+          ++correct;
+        }
+      }
+      System.out.printf("Precision: %f\n", correct / total);
+
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    } finally {
+      fs.delete(new Path(RESULT_PATH), true);
+      fs.delete(new Path(MODEL_PATH), true);
+      fs.delete(new Path(SEQTRAIN_DATA), true);
+    }
+  }
+
+  private void neuralNetworkTraining() {
+    String mode = "train";
+    String strTrainingDataPath = "src/test/resources/neuralnets_classification_training.txt";
+    int featureDimension = 8;
+    int labelDimension = 1;
+
+    Path sequenceTrainingDataPath = new Path(SEQTRAIN_DATA);
+    // reuse the Configuration and FileSystem initialized in setUp()
+    try {
+      SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
+          sequenceTrainingDataPath, LongWritable.class, VectorWritable.class);
+      BufferedReader br = new BufferedReader(
+          new FileReader(strTrainingDataPath));
+      String line = null;
+      // convert the data into SequenceFile format
+      while ((line = br.readLine()) != null) {
+        String[] tokens = line.split(",");
+        double[] vals = new double[tokens.length];
+        for (int i = 0; i < tokens.length; ++i) {
+          vals[i] = Double.parseDouble(tokens[i]);
+        }
+        writer.append(new LongWritable(), new VectorWritable(
+            new DenseDoubleVector(vals)));
+      }
+      writer.close();
+      br.close();
+    } catch (IOException e1) {
+      throw new RuntimeException(e1);
+    }
+
+    try {
+      NeuralNetwork.main(new String[] { mode, SEQTRAIN_DATA,
+          MODEL_PATH, "" + featureDimension, "" + labelDimension });
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+}
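
A caveat on neuralNetworkTraining above: the SequenceFile.Writer constructor it calls is deprecated in Hadoop 2.x in favor of the option-based factory. A sketch of the equivalent writer setup against the hadoop 2.7.0 API pinned in the pom:

    // Option-based replacement for the deprecated Writer constructor.
    SequenceFile.Writer writer = SequenceFile.createWriter(conf,
        SequenceFile.Writer.file(sequenceTrainingDataPath),
        SequenceFile.Writer.keyClass(LongWritable.class),
        SequenceFile.Writer.valueClass(VectorWritable.class));
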
diff --git a/src/test/resources/dimensional_reduction.txt b/src/test/resources/dimensional_reduction.txt
new file mode 100644
index 0000000..0528d42
--- /dev/null
+++ b/src/test/resources/dimensional_reduction.txt
@@ -0,0 +1,1600 @@
+-5.214647	7.087974	6.729247
+-0.421762	8.426638	7.896089
+-6.134601	5.686436	6.088567
+6.212569	8.307839	2.362335
+6.344603	7.664955	0.532386
+0.098673	6.992156	7.840777
+2.261138	8.088781	7.203263
+6.319323	6.770813	1.717501
+3.039391	6.589591	6.782867
+5.188841	7.283918	-2.624659
+-2.593684	7.969614	7.754495
+0.757685	7.698148	7.719040
+-2.493988	7.895625	7.774168
+-6.348292	9.235869	5.912167
+5.811158	8.661124	3.604759
+5.084188	9.415847	4.881474
+3.233400	10.133548	6.658328
+-2.671727	5.834501	7.738187
+-5.586962	6.801060	6.491443
+-0.300033	7.474045	7.886303
+2.959122	8.991690	6.831929
+-6.183061	8.313371	6.049571
+4.871536	9.088393	5.161636
+-7.851500	8.035905	4.251177
+6.314192	6.764987	0.220085
+4.037797	7.132615	6.039136
+4.866675	7.269669	5.167711
+2.389975	6.966941	-4.607930
+6.036700	7.196553	3.010703
+3.487451	7.463430	6.481716
+5.128315	6.459363	4.819489
+-3.312319	8.552966	7.573618
+5.381252	7.880799	4.433421
+-2.296213	7.214618	7.809371
+4.936740	7.142252	5.078796
+6.356512	7.265930	0.749298
+-5.989940	7.693886	6.201601
+-0.330259	8.197873	7.888911
+6.353076	7.452645	1.306126
+0.739180	7.147566	7.723272
+2.828880	6.862924	6.908594
+4.856910	6.987142	5.179873
+5.657584	8.035364	3.933724
+-1.239718	8.616024	7.912864
+-5.292073	8.727537	6.682023
+4.630170	6.844327	5.447683
+-7.776487	6.028616	4.357342
+0.254217	8.814937	7.817341
+-5.268034	7.535614	6.696807
+5.380876	8.718970	4.434040
+-1.523348	8.209247	7.898946
+4.738628	8.587521	-3.159172
+-9.342977	7.790923	-2.589265
+3.885419	6.894194	6.170687
+-2.898182	3.924822	7.686316
+-6.854335	6.854464	5.444460
+-3.369974	7.406866	7.556063
+6.350405	6.123195	0.621545
+4.233985	6.918638	5.858301
+4.322963	6.955515	5.771680
+-0.239685	7.946362	7.880742
+1.511782	7.418960	7.504429
+2.047813	9.715201	7.298597
+-9.452372	8.323733	-0.308520
+-9.453994	6.873090	-0.331165
+-9.253227	6.825797	-3.051144
+-6.102497	8.076519	6.114084
+6.258758	7.557662	2.129656
+4.954859	8.240420	-2.919076
+6.050266	8.209733	2.968663
+-2.841788	6.648304	7.699870
+3.024167	7.917924	6.792281
+3.114077	8.100438	6.735939
+3.632127	6.326400	6.373764
+-6.966740	7.420432	5.329874
+-5.306197	7.406685	6.673285
+3.354050	4.534522	6.576440
+-1.342129	6.924848	7.909008
+-8.440894	8.444134	3.276504
+3.953765	5.767462	6.112599
+-0.353391	7.464577	-4.622600
+-5.600283	7.198775	6.482420
+-1.138683	6.957302	-4.305837
+1.287620	5.885861	7.577024
+3.140525	7.719961	6.719019
+4.439921	8.120649	5.653091
+5.843908	8.567449	3.528124
+-1.999807	7.143920	7.852706
+-6.998283	8.320347	5.296949
+-1.641686	8.000756	7.890141
+-0.637122	7.255327	7.908746
+3.545277	8.370231	6.439237
+-5.057813	8.143913	6.821470
+-1.788540	7.325451	7.876755
+3.231971	5.134089	-4.275558
+0.014418	9.135857	7.852132
+-1.049889	8.435574	7.916518
+5.229370	8.210316	4.671893
+2.998831	7.019181	6.807834
+5.822185	7.044617	3.579246
+3.502988	8.529447	6.470388
+3.556294	8.944615	6.431043
+0.275645	7.626098	7.813860
+6.350581	8.483290	0.624596
+-1.029876	9.017951	7.916638
+-1.236492	8.501062	7.912964
+6.214590	8.318062	2.353006
+2.357908	5.884950	7.157349
+4.657961	8.877248	5.416277
+-3.966537	6.800191	7.346643
+1.655427	7.619326	7.453822
+-3.261750	7.149449	7.588636
+5.816712	7.414349	-1.557348
+1.032510	5.810382	7.650472
+6.294199	6.563924	0.071914
+1.120549	7.415623	7.626208
+5.858183	8.279860	3.493880
+-2.267918	8.699031	7.813994
+6.141022	6.650649	2.658369
+2.042799	7.661015	7.300742
+1.572004	8.557056	7.483607
+4.002859	7.401975	6.069959
+-8.902572	8.193579	2.234928
+-6.757595	6.495666	5.539775
+5.814173	6.947760	3.597812
+3.928729	8.669335	-3.858716
+2.746337	5.551667	-4.487923
+6.222903	7.098401	2.313919
+3.296003	7.563959	-4.243062
+4.094151	5.513191	5.988561
+-2.383802	8.495619	7.794405
+-8.119292	7.726085	3.842728
+-1.721302	8.385161	7.883222
+5.840237	7.248265	3.536846
+6.045281	8.429626	-0.974619
+-6.645581	7.511995	5.646504
+3.676010	6.462470	6.339899
+-6.524017	6.610462	-7.983916
+3.022285	8.377986	6.793441
+5.535313	8.150169	4.167033
+5.417565	7.607157	4.373057
+-7.903988	7.482833	4.174878
+5.886684	7.312755	3.423871
+0.275682	6.865824	7.813853
+-9.015928	8.891424	-3.931973
+-1.964493	8.183805	7.857120
+0.868877	6.122895	7.692594
+-6.501610	5.924574	5.778242
+-4.445926	8.431146	7.139840
+3.630562	7.560270	6.374962
+-0.724959	7.132482	7.912209
+3.984812	8.029555	6.085724
+3.112338	5.620074	6.737046
+6.158734	7.283831	2.590392
+3.505755	8.124096	6.468364
+3.901459	5.420980	6.157185
+-3.650869	7.628908	7.463849
+3.777994	7.352483	6.259097
+2.904796	6.157412	6.864344
+-7.607583	6.222248	4.584928
+-4.892777	9.297294	6.913692
+6.361003	6.435683	0.991536
+-8.797081	8.179497	-4.544607
+-8.796629	8.225805	2.507647
+6.359129	9.044684	1.142701
+1.193283	6.636904	7.605304
+-0.747523	6.669483	7.912940
+1.997140	5.531188	7.320083
+-0.352661	6.222056	7.890768
+5.849521	6.999574	3.514722
+-1.104270	7.883201	7.915935
+2.862075	8.044723	6.889394
+-2.952922	6.923279	7.672752
+-4.735157	6.955731	6.997307
+-0.181899	6.131445	7.874975
+1.418946	8.296576	7.535425
+-8.385729	8.203527	3.380724
+0.424236	7.739502	7.788016
+1.542411	8.581142	7.493910
+3.148428	7.671558	6.713933
+-8.159330	9.583673	3.777221
+5.351837	7.865185	4.481318
+0.300457	9.028326	7.809751
+-7.052613	8.727547	5.239419
+5.823358	10.398561	-1.542658
+6.346598	4.926079	1.416869
+-1.320844	6.475045	7.909918
+-2.261126	9.082600	7.815089
+1.895567	9.200700	7.361846
+-1.584108	7.255155	7.894646
+5.644996	4.143754	-1.904898
+4.966373	7.129021	5.040286
+6.346146	8.891149	1.423544
+5.942655	7.292993	3.279345
+5.224576	9.170574	-2.575991
+0.711951	8.680131	7.729412
+4.371493	6.613097	5.723145
+-2.321962	5.922016	7.805074
+5.039264	8.445435	-2.817415
+6.266070	8.601982	2.088217
+-3.598651	7.143981	7.481840
+-0.668148	5.607563	7.910081
+6.245001	7.990574	-0.215363
+-3.184933	8.201757	7.610774
+-7.744719	7.206444	-6.521306
+-3.850260	6.913175	7.391520
+-4.029040	6.273264	7.321684
+4.252708	10.519454	5.840322
+-9.466489	9.109256	-0.542468
+6.117972	7.595596	2.742709
+-9.173549	7.484529	1.374899
+-0.022494	6.172525	7.856812
+-5.760021	6.991215	6.371314
+6.244617	6.520502	-0.217323
+1.607393	6.167728	7.471106
+6.044820	8.261442	2.985655
+2.244340	8.182271	7.211061
+4.557560	7.204057	5.528005
+-8.086230	6.975473	3.895884
+-2.121861	7.079526	7.836223
+-7.691109	6.849068	-6.598656
+-6.820449	7.711009	5.478187
+0.184109	8.427748	7.828303
+1.907641	7.339092	7.356973
+3.424941	7.709557	-4.174253
+-6.153544	7.248713	6.073392
+3.524789	7.606886	6.454387
+3.539394	7.409512	6.443599
+0.548160	9.020287	7.764171
+2.533926	7.235065	7.069406
+-2.419615	7.439628	7.788000
+0.102776	7.542637	-4.732985
+6.291273	6.933579	1.931287
+4.855127	6.289525	5.182087
+-6.765639	5.535820	5.531962
+-1.100403	6.350964	7.915989
+-9.473760	7.699958	-1.253486
+5.680254	7.902587	-1.838043
+-0.335856	6.952193	7.889381
+4.575161	7.371058	5.508760
+2.505241	10.157312	-4.572280
+0.369977	8.331159	7.797799
+-8.453016	5.951523	3.253151
+5.805336	9.182754	3.618115
+1.730073	9.662341	7.426234
+4.997530	6.370577	4.999191
+-3.249081	7.502106	7.592343
+-9.477245	7.115397	-1.025054
+5.750761	5.547668	-1.697717
+-2.605591	6.960813	7.752059
+5.786660	7.811105	3.660435
+-7.248153	6.970221	5.023369
+-9.477241	7.067643	-0.962881
+4.286932	7.717918	5.807119
+2.131911	7.675562	-4.677103
+5.866604	6.504499	3.473428
+-9.321725	8.247840	0.710182
+4.522854	8.831682	5.565542
+1.824403	8.171250	7.390083
+3.895328	7.311882	6.162355
+0.500799	7.841007	7.773532
+6.360553	6.134380	1.063968
+-1.243760	9.391655	7.912737
+-3.372722	6.932812	7.555215
+6.357561	7.453840	0.779004
+2.587772	6.191203	7.041330
+4.060982	8.791239	6.018459
+5.602012	8.081433	4.042479
+-9.077068	6.740655	-3.733853
+3.981327	9.929923	6.088756
+5.920037	6.508152	3.338944
+-3.276955	9.328079	7.584158
+6.003296	7.780698	3.110387
+6.359511	8.711738	0.850391
+-4.866301	8.356692	6.928038
+-6.916398	7.079579	5.381715
+-0.102307	7.976592	7.866321
+3.979945	7.817220	6.089957
+4.211940	6.575031	5.879302
+5.887451	6.015154	3.421953
+-0.733913	7.878341	7.912507
+-3.125329	6.697193	7.627394
+-6.922420	8.085911	5.375560
+3.286479	7.029660	-4.247965
+3.687861	7.261034	-4.019285
+5.681640	7.883537	3.885107
+1.263660	8.481316	7.584332
+6.347892	5.847123	0.580506
+5.262682	6.908178	4.621396
+-3.782596	7.820711	7.416715
+0.061697	9.240942	7.845876
+5.729288	8.707359	3.785810
+5.166742	7.690181	4.764320
+-0.091667	7.694332	7.865102
+-8.476749	7.434249	-5.269119
+-5.718910	8.261278	6.400428
+-4.201475	7.439952	7.249750
+0.250055	6.496613	7.818010
+6.303937	6.958160	1.841469
+6.358481	7.403499	0.809088
+5.617285	4.820202	-1.956023
+6.224497	6.137625	-0.315755
+-4.868422	6.989113	6.926893
+2.872551	7.663263	6.883287
+0.427736	6.187043	7.787372
+5.170819	6.040786	4.758399
+6.247120	9.079665	2.192629
+3.647424	6.889324	6.362020
+4.301086	5.540704	5.793258
+2.141243	9.051478	7.257823
+3.458850	7.566013	6.502405
+3.955735	7.960201	6.110903
+-4.908849	7.589488	6.904924
+2.083486	7.460633	7.283207
+5.391952	7.996370	4.415779
+6.346483	6.635142	0.559219
+-4.257017	8.559807	7.225604
+-2.037754	6.963493	7.847785
+1.238738	6.348253	-4.807544
+2.192859	7.660561	7.234647
+4.835362	7.332150	5.206514
+0.648422	8.661534	7.743335
+-1.521003	6.165460	7.899103
+6.083312	6.844283	2.862010
+4.735022	8.063465	5.327186
+4.636124	8.031571	-3.263851
+1.410927	7.979688	7.538041
+5.738476	6.918446	3.766177
+-6.375569	6.960824	5.888803
+-1.170237	8.480965	7.914728
+6.320491	8.625886	0.273281
+6.251438	9.217744	-0.181965
+-2.234117	8.170640	7.819382
+4.944896	6.349166	-2.930763
+2.297860	7.184230	7.186040
+6.070834	7.694718	2.903031
+-9.274463	8.414727	-2.952000
+3.217512	6.933662	6.668853
+-6.102736	7.751450	6.113895
+5.772427	6.752888	3.692175
+5.791323	7.481863	3.649942
+5.568795	7.088601	4.105280
+0.577830	7.832262	7.758150
+4.144203	5.926029	-3.698474
+6.129768	6.829884	-0.699540
+-3.297193	8.024220	7.578148
+6.358020	7.620347	0.793416
+3.902236	6.684126	6.156529
+4.224781	7.729601	5.867091
+-1.270074	7.713791	7.911859
+6.359562	7.721199	1.123708
+-2.708990	7.785976	7.730119
+6.349131	8.407377	0.600185
+6.021141	7.252759	-1.045575
+3.147632	7.237756	6.714446
+5.400282	6.420153	4.401961
+6.354156	8.134127	1.283659
+0.803408	6.196823	7.708377
+5.402218	6.184263	4.398739
+-5.233859	9.140946	6.717635
+4.011303	7.469947	6.062546
+-1.962944	7.524212	7.857310
+-9.346823	6.747060	0.568108
+4.266689	7.563927	-3.599749
+6.284477	7.934805	0.008150
+-7.089520	8.879071	5.199737
+6.316611	8.114970	0.240059
+3.048044	7.632666	6.777494
+0.910120	7.493826	7.682340
+-3.917933	8.683322	7.365646
+5.434955	7.994632	-2.265536
+4.233330	8.035306	5.858927
+6.118830	7.008004	2.739644
+0.353716	6.738118	7.800653
+3.585398	7.109843	6.409244
+-0.551278	7.456853	7.904411
+-3.326026	4.393035	7.569487
+2.085628	8.474764	7.282276
+6.105830	9.409802	2.785460
+-6.229617	7.220079	6.011559
+5.247765	8.136057	-2.543826
+-0.574499	7.308740	7.905677
+-0.729957	8.409236	7.912376
+-0.345802	8.516231	7.890206
+2.569365	7.975445	7.050990
+4.631701	7.908545	5.445962
+2.927578	8.951455	6.850827
+6.143835	6.242815	2.647770
+2.083295	7.110173	7.283290
+-3.881560	7.792531	7.379637
+6.024319	9.191448	3.048260
+-6.397327	7.901329	5.870025
+4.647852	8.578470	5.427744
+4.964773	9.060097	-2.907381
+4.124430	7.313521	5.960941
+-8.072332	6.383679	3.917983
+-9.265460	7.468098	-2.994653
+4.167869	9.560358	5.920757
+-6.457934	8.405787	5.817045
+6.245927	8.113575	2.198894
+2.200433	14.164363	7.231206
+6.277532	14.095384	2.019850
+-4.220554	13.524413	7.241510
+-1.318065	12.418892	7.910033
+-8.149477	13.914129	-5.880997
+-3.887267	12.666497	7.377455
+0.148262	12.069997	7.833655
+5.094620	8.701143	4.866950
+3.949762	11.800855	6.116041
+1.511088	12.142680	7.504666
+6.276827	9.878912	2.024190
+6.355193	12.665769	1.260367
+5.017600	11.788278	4.972383
+-7.360885	13.129024	4.891973
+-5.622475	14.109412	6.467306
+-4.158722	13.220162	7.268011
+2.284093	12.306755	7.192525
+-3.644775	10.761219	7.465969
+-7.657254	11.230894	4.519568
+-0.258630	11.401916	7.882539
+-0.350257	14.186523	7.890572
+-9.123218	11.672188	-3.573463
+-2.949955	12.359559	7.673498
+6.174855	12.262373	2.525785
+3.044262	13.392558	6.779844
+2.220515	12.822477	7.222035
+2.195097	11.007434	7.233631
+0.100823	13.814423	7.840475
+5.373599	10.806169	-2.360648
+-1.604513	13.561378	7.893097
+3.867103	12.184161	6.186007
+3.698579	12.144320	6.322273
+4.513979	12.038316	5.575054
+4.227247	14.107116	5.864738
+1.120093	13.679655	7.626336
+6.188756	11.989198	-0.473422
+3.438085	12.280810	6.517294
+6.112719	10.956084	2.761336
+-3.187831	11.986597	7.609954
+3.933784	13.348806	6.129733
+0.182424	12.800039	7.828558
+2.048057	11.698212	7.298492
+3.928296	13.008112	6.134416
+-2.308261	11.020246	7.807371
+-6.045383	12.809784	6.158867
+6.155309	12.219690	-0.605931
+0.240435	10.818987	7.819548
+6.108442	11.364076	2.776353
+4.545351	12.861107	5.541272
+0.235485	12.115360	7.820334
+-8.667115	12.855218	2.810004
+0.284741	13.335377	7.812363
+3.887776	13.376884	-3.887347
+-8.887502	13.046304	-4.306740
+2.136939	12.566704	7.259735
+4.528727	13.555790	5.559227
+-2.438147	11.251261	7.784621
+-8.458652	11.071519	3.242236
+2.806073	13.502271	6.921654
+-7.725708	12.797749	4.427379
+-0.003525	11.421310	7.854430
+6.358981	12.445085	1.148688
+4.585620	13.790860	5.497256
+3.143811	11.802578	6.716906
+-4.133530	12.826963	7.278638
+-2.439599	11.199172	7.784354
+4.217928	12.501718	5.873615
+2.049950	11.928073	7.297681
+2.949844	11.699537	6.837510
+2.456571	11.932754	7.108770
+4.029759	12.196578	6.046263
+-2.190809	11.503567	7.826071
+-6.459364	12.207927	5.815784
+4.536451	11.906068	5.550900
+-3.589441	11.646578	7.484972
+5.546791	10.098034	-2.080909
+-5.405388	13.270638	6.610830
+1.061260	10.977444	7.642673
+6.290275	11.654097	1.938002
+-4.230323	12.889407	7.237269
+6.203905	12.010467	-0.408922
+4.772615	12.709266	5.282614
+-1.431253	11.690071	7.904576
+2.580461	11.803336	7.045174
+-8.594275	10.839684	2.967809
+5.871124	13.221555	3.462370
+2.153124	13.809412	7.252530
+6.359263	11.244083	0.839400
+4.429757	12.805778	-3.458944
+-0.820555	12.311015	7.914864
+5.847632	13.556211	3.519243
+5.314352	13.779052	-2.448780
+1.945985	11.703117	7.341333
+-4.670287	13.355747	7.030485
+5.135028	11.670193	4.809933
+-9.311509	13.428087	-2.765352
+3.566544	11.807070	6.423392
+-3.699740	12.688370	7.446655
+1.232220	11.272387	7.593792
+-9.191700	10.931599	1.304463
+3.955532	10.072039	6.111078
+1.418355	11.334162	7.535618
+-3.643768	12.687898	7.466318
+-7.684714	12.596101	4.482886
+5.833969	13.917750	3.551659
+6.308725	12.920192	1.804926
+-1.940517	13.537663	7.860027
+1.530055	12.863642	7.498171
+5.886935	10.905312	-1.396406
+5.005207	12.428249	-2.859015
+3.903090	10.956713	6.155808
+-6.420539	11.382701	5.849852
+1.216096	13.316237	7.598587
+0.875209	13.006677	7.691036
+-1.997680	13.429089	7.852976
+-1.973448	12.520848	7.856016
+-3.425593	11.960627	7.538689
+6.268910	12.903694	-0.085811
+5.990917	12.857821	3.146061
+1.012205	10.830698	7.655909
+-1.846537	12.420164	7.870717
+6.183338	13.237705	2.490626
+5.448923	14.035248	4.319779
+5.995965	13.767690	3.131592
+6.274832	14.495142	-0.051112
+6.359164	12.723624	0.835211
+-7.593493	11.184679	4.603240
+3.083310	12.681520	6.755422
+0.260013	11.221469	7.816405
+0.343636	10.990475	7.802404
+6.277683	12.464771	2.018922
+-3.066784	12.482497	7.643248
+5.781331	12.400478	3.672372
+-3.621242	13.068836	7.474104
+-1.931696	13.049244	7.861078
+0.837351	12.632811	7.700270
+1.967123	11.270134	7.332606
+-9.394388	14.145462	0.253446
+-2.534120	13.438985	7.766405
+0.227254	11.350399	7.821635
+5.028503	15.122921	4.957706
+-0.948655	14.875576	7.916607
+3.986816	12.556429	6.083979
+2.514664	11.912112	-4.569233
+5.708457	11.516965	3.829728
+1.422349	11.121027	7.534313
+2.736121	12.413041	6.961043
+6.351253	11.739322	1.340829
+3.079889	11.625464	6.757575
+-2.089229	12.031501	7.840816
+-2.193304	12.123252	7.825692
+1.889709	10.868271	7.364203
+-3.206395	11.656659	7.604671
+6.148047	14.577196	2.631759
+-4.757755	14.360336	6.985581
+6.313249	12.424539	0.212440
+-5.792592	9.989841	6.347988
+2.163711	10.312711	7.247792
+6.258981	13.025250	2.128419
+-9.195631	11.647591	1.288903
+2.210246	12.189040	7.226733
+-5.003531	14.103574	6.852340
+5.550904	13.159012	-2.073816
+5.844144	13.173195	3.527564
+-5.941823	11.730335	6.238107
+-1.076183	13.556973	7.916283
+-9.417359	12.726085	-2.060862
+0.601662	13.071128	7.753225
+6.328447	11.973070	1.632760
+6.241337	13.075520	2.222702
+-6.916809	13.412766	5.381296
+5.773889	12.341480	-1.649569
+-1.917718	13.168649	7.862723
+6.261572	11.640070	2.113891
+5.714205	13.660235	3.817691
+4.934298	12.570212	5.081946
+3.293490	11.983398	6.617982
+2.273933	11.513960	7.197289
+3.272921	12.765062	-4.254902
+-3.338229	12.549393	7.565786
+2.291945	12.962360	7.188830
+5.323405	12.042522	4.526801
+6.163162	12.864248	2.572920
+3.642408	13.745150	6.365878
+-3.154212	13.596934	7.619401
+6.331762	11.176867	1.599055
+-7.429717	13.007145	4.809099
+6.133577	13.712728	2.686094
+6.360139	11.659783	1.093091
+3.321973	12.503798	6.598555
+4.630000	12.749431	5.447874
+-0.187077	11.757690	7.875510
+-0.026942	12.323031	-4.706588
+5.898208	13.115250	3.394906
+5.113889	12.352899	4.839911
+4.650267	12.218878	5.425009
+1.829654	10.485933	7.388028
+1.795357	12.294968	7.401369
+6.277540	12.579757	2.019801
+5.888066	12.701606	-1.393705
+-6.543243	12.863081	5.740756
+-2.852873	12.457608	7.697240
+1.664032	12.414204	7.450688
+1.366065	11.593582	7.552490
+4.471176	10.306188	5.620444
+-0.247407	10.363644	7.881480
+-7.280583	12.610463	4.986106
+3.403407	11.526202	6.541915
+-9.476786	12.750985	-0.896430
+3.846335	11.429080	6.203254
+-6.349315	13.675906	5.911294
+1.336542	12.372627	-4.801329
+-6.825960	11.448446	5.472727
+-4.207928	11.107317	-9.714631
+4.081699	12.052876	5.999829
+-5.513027	11.646216	6.540860
+4.861402	13.063875	5.174285
+5.812969	13.018677	3.600589
+4.918862	11.593634	5.101766
+6.360986	14.440337	0.973071
+6.354472	10.587327	0.700211
+5.860518	11.968779	-1.458480
+4.591805	10.905904	5.490430
+3.091216	15.205112	6.750436
+5.750453	10.551481	3.740338
+5.778717	11.294675	-1.639376
+6.193853	11.509911	-0.452065
+2.387238	12.996018	7.143096
+-3.947961	13.152992	7.353947
+1.393332	14.624528	-4.796817
+-5.124594	12.675945	6.782756
+-2.636198	13.273949	7.745711
+3.206701	11.884348	6.675981
+-0.746332	11.923439	7.912903
+0.605017	10.787804	7.752526
+6.131423	13.038070	-0.693648
+6.205244	13.275952	-0.403064
+-5.561020	11.049258	6.508909
+-6.083835	12.775993	6.128803
+-5.487547	12.654022	6.557633
+-9.161911	10.765233	1.418888
+2.692253	12.710407	6.985239
+6.344149	12.979093	0.526139
+3.651573	10.832002	6.358823
+6.172976	12.897683	2.533460
+0.172148	11.975954	7.830108
+-1.892359	11.576016	7.865645
+-5.500278	13.369908	6.549269
+5.658250	12.116016	3.932391
+-5.080908	11.447498	6.808173
+0.519807	11.466808	7.769812
+-6.060278	14.865419	6.147263
+-7.464249	11.110530	4.766732
+4.338877	15.118552	5.755867
+-8.627323	12.997509	2.897199
+4.015015	11.515665	6.059280
+-3.185039	13.168533	7.610744
+6.229678	13.322411	-0.291177
+6.353475	11.503833	1.298020
+3.771560	13.143656	6.264283
+6.169827	12.485710	2.546237
+-2.410118	12.906419	7.789715
+4.796591	12.690493	5.253791
+6.348361	11.880059	0.587843
+-0.195490	10.731857	7.876371
+-1.688917	12.856378	7.886133
+4.789836	12.629618	5.261943
+5.746570	11.622587	3.748745
+6.265486	12.448909	2.091582
+4.930546	12.092473	5.086777
+-6.084301	11.766311	6.128437
+5.140407	13.570652	4.802251
+5.713530	12.540809	3.819107
+3.292885	12.807401	6.618393
+-8.375042	12.816528	3.400541
+-1.849052	14.102131	7.870446
+6.360283	12.820072	0.892379
+-5.694074	13.916474	6.417840
+-1.709123	14.323741	7.884333
+-4.788526	12.469959	6.969475
+0.972192	14.028338	7.666447
+3.011308	13.105401	6.800192
+1.468204	12.717187	7.519144
+-0.128018	11.544309	7.869207
+1.769517	14.310779	7.411293
+3.416378	13.297534	6.532741
+6.091342	12.817288	2.835098
+4.903210	13.718114	5.121715
+1.861891	13.165251	7.375313
+5.894992	11.682738	-1.377079
+-3.290961	13.090343	7.580004
+-2.550072	12.844061	7.763261
+6.358691	12.985470	0.816711
+-6.282860	12.918771	5.967415
+0.582429	11.916590	7.757206
+5.509402	13.611536	4.213812
+4.462385	13.698571	-3.429406
+-2.074580	11.103913	7.842834
+-2.007179	12.690377	7.851764
+3.328435	13.246261	6.594120
+-4.361705	14.180183	7.178776
+5.427762	13.230125	4.355852
+-2.564246	10.412587	7.760439
+0.511257	13.505404	7.771491
+-7.118429	12.853766	5.168306
+-9.475765	13.800358	-0.824356
+-4.653438	12.465317	-9.455134
+-0.052124	12.139902	7.860440
+3.289446	11.502707	6.620725
+6.305397	11.844576	1.830494
+6.074502	11.202684	-0.884608
+-8.161062	11.140775	3.774358
+-5.573957	11.733566	6.500216
+2.046286	12.404400	7.299250
+4.699889	11.882113	-3.199379
+0.346451	13.200982	7.801916
+5.453170	13.565698	4.312478
+-8.778350	11.780831	2.552213
+1.446117	11.222921	7.526491
+5.733850	11.498124	-1.732233
+3.570606	12.396087	-4.090901
+2.283106	13.869582	7.192989
+1.005622	12.366440	7.657658
+-7.628607	11.979518	4.557416
+4.602979	13.059481	5.478051
+-7.374732	11.122810	4.875466
+5.695161	14.157429	-1.809139
+-8.930701	12.672494	2.157866
+4.604108	13.273024	5.476797
+-5.565179	12.244452	6.506119
+0.289461	16.133265	7.811582
+0.548967	11.458822	7.764009
+4.012242	12.936111	6.061721
+5.740129	9.532853	-1.719483
+1.335582	14.451628	7.562134
+-3.387440	10.952416	7.550654
+5.525784	12.191532	4.184337
+6.102361	11.889337	2.797478
+4.274842	12.854650	5.818899
+1.795004	11.690718	7.401505
+3.559171	12.862841	6.428899
+4.015833	12.018083	6.058560
+0.748977	11.787962	7.721038
+6.317957	10.756836	1.729344
+4.428278	12.278813	-3.460272
+4.980967	14.513476	5.021115
+-4.615852	13.423309	7.057780
+-8.992003	12.593540	1.981944
+6.315022	12.590719	1.754185
+1.492497	12.890922	7.510977
+4.255104	12.878121	5.838012
+-3.824468	10.568661	7.401203
+-9.378446	11.659904	-2.363303
+5.784078	15.986412	3.666226
+3.532043	11.874227	6.449036
+4.746011	13.176407	5.314234
+-9.427570	11.192457	-1.965880
+-2.733807	12.664026	7.724643
+1.495107	13.675998	7.510095
+0.897727	11.975868	7.685447
+-9.083527	11.597328	-3.712014
+-0.652447	12.807256	7.909421
+1.479503	11.562107	7.515357
+6.355416	11.297017	1.255073
+-7.650590	12.121900	4.528409
+-5.959013	11.854527	6.225127
+-0.086290	13.258193	7.864480
+-7.606603	11.621420	4.586204
+-7.326432	13.920220	4.932690
+0.262480	10.668410	-4.760242
+-5.978467	12.509034	6.210354
+2.050260	13.495431	7.297548
+-7.207742	12.208868	5.069218
+4.969893	11.880338	5.035675
+5.687324	10.996126	3.873475
+2.761243	11.914754	6.947012
+3.239989	14.571603	6.653946
+6.170931	11.848683	-0.545584
+5.779203	12.955048	-1.638347
+-2.195182	11.747440	7.825406
+-0.472425	13.861354	7.899600
+-4.767957	12.105010	6.980259
+-2.996437	12.406511	7.661683
+-9.261020	13.100410	1.011002
+-9.462484	12.115588	-1.524710
+-3.174954	11.106646	7.613591
+2.506613	13.339085	7.083434
+-2.677340	11.795304	7.736984
+6.219530	13.050186	2.329921
+3.336722	14.179254	6.588418
+4.221149	12.546914	5.870551
+1.913723	12.890158	7.354508
+-1.653337	12.937776	7.889179
+6.347230	12.100841	1.407362
+6.360152	11.963744	0.884033
+1.093849	11.142433	-4.813126
+3.964864	12.416487	6.103027
+0.194810	12.225559	7.826672
+-1.059100	12.965112	7.916445
+6.329662	11.932717	0.358924
+12.117801	7.615474	-2.463974
+12.476514	5.315066	2.802028
+12.283168	7.221501	3.839794
+12.603519	7.435058	0.750364
+12.592326	6.902404	1.582770
+9.170913	7.858727	-7.548165
+7.499041	6.116741	-8.970199
+6.869344	6.419820	-9.381533
+12.605719	6.432333	0.923674
+11.224176	6.819834	6.772856
+12.263786	8.649724	-1.911026
+12.360116	7.944104	3.480738
+12.016205	8.681077	-2.797770
+6.715064	5.769742	11.851487
+3.304638	7.618947	13.498445
+2.652706	9.302665	-10.916271
+12.198309	7.961886	-2.171957
+10.996977	8.512681	7.203566
+4.538645	5.885145	13.034513
+6.432457	7.909771	12.035184
+10.003697	8.758199	8.740944
+4.720601	8.631316	-10.400484
+-3.333133	7.570941	-10.144582
+10.727716	7.270708	-5.569854
+12.597712	8.025809	1.452255
+3.711269	7.638358	-10.704200
+10.560779	9.236596	-5.827907
+10.379283	7.613797	-6.093002
+-6.247664	6.468697	13.173585
+5.640737	7.111267	-10.030591
+4.954559	7.276987	-10.315167
+-2.928065	8.231343	14.040700
+-4.534519	7.544332	13.726638
+9.976473	7.938843	8.777152
+3.717911	10.089548	-10.702532
+10.817113	8.620865	-5.425469
+-3.103877	8.278997	-10.241276
+5.400397	8.898638	-10.136236
+6.120927	9.093006	-9.799335
+7.008441	8.161593	11.649963
+7.700290	6.716768	-8.826038
+12.432928	6.803510	3.082792
+1.224284	7.922068	13.996476
+12.058273	8.341711	4.686799
+9.872558	7.029309	-6.761280
+9.960082	7.203683	-6.652607
+8.156103	7.744110	-8.474580
+9.304862	7.261491	-7.408487
+12.215779	6.742451	4.119245
+12.572268	7.495197	0.077119
+3.889592	5.510177	-10.657932
+2.695033	9.675566	13.679702
+11.825405	7.049338	-3.348740
+12.344662	8.548160	3.557042
+-5.092813	6.342415	13.572089
+7.687894	7.226831	11.137239
+0.545483	8.299589	14.087910
+2.561366	6.515293	-10.929653
+-6.099383	7.351244	-8.382270
+12.200227	7.495353	4.180085
+8.266143	7.785569	-8.384221
+12.533724	7.268476	-0.348547
+12.598801	7.130035	0.573150
+12.297999	7.001475	3.774335
+4.048360	7.360770	-10.614112
+8.872720	6.945690	10.060920
+12.542827	5.578369	2.258338
+12.602721	8.515038	0.712509
+7.286051	7.962592	-9.115883
+6.954644	8.784232	11.687769
+12.448153	7.276414	-0.987164
+12.335934	7.950223	-1.590987
+12.536696	6.363012	2.318113
+11.917202	6.653421	-3.094186
+12.590491	7.132051	0.373562
+12.316961	8.367514	-1.679085
+12.393544	6.882112	-1.301746
+5.559799	7.292366	12.542932
+12.436356	8.214343	3.062120
+-2.073902	8.565516	-10.601441
+3.106069	7.628667	13.560833
+12.334180	7.305677	-1.599262
+3.345721	7.423119	13.485127
+8.986596	6.983190	-7.733120
+-8.287909	6.497748	-5.634545
+12.186905	8.300797	4.231241
+10.485464	4.578501	-5.939766
+8.015121	7.559538	-8.587133
+8.164512	8.288012	-8.467754
+5.044384	7.179834	-10.280860
+11.017901	6.517774	7.165477
+10.077070	7.193753	8.642057
+4.275679	7.833303	-10.547008
+6.048655	8.797256	-9.835909
+11.731815	7.346617	-3.591642
+7.222918	7.879450	-9.157749
+-1.858404	9.084682	-10.662265
+10.485500	7.391798	8.053432
+10.832532	6.598463	7.492996
+12.524820	6.870312	-0.428713
+9.339466	8.506801	9.557868
+10.347612	6.370546	-6.137741
+12.591658	8.151038	1.596983
+11.836186	5.860574	5.355112
+12.510739	8.290920	2.545682
+6.796725	7.501466	-9.425317
+8.180317	7.192581	-8.454888
+10.883362	6.485569	-5.315454
+8.504249	7.530093	-8.180837
+-0.724270	6.081097	-10.905232
+10.230831	7.691686	8.428378
+11.659548	6.699526	5.814559
+9.368641	5.855153	-7.340348
+2.159320	8.411914	13.814369
+12.346853	8.821002	-1.538822
+4.613264	6.143349	13.002049
+7.299669	7.867710	11.438387
+11.171941	7.129693	-4.802376
+9.925467	5.882082	-6.695897
+7.426975	6.194402	11.342114
+11.379003	8.193646	6.454989
+8.207717	9.758660	-8.432475
+7.724381	6.750428	-8.808343
+7.024567	6.584960	11.638554
+10.140969	7.909540	-6.419449
+8.871038	10.295108	-7.845010
+12.400649	7.468600	-1.263387
+10.173426	6.192666	8.509206
+11.593900	8.011367	-3.924181
+9.233049	4.820097	9.677198
+11.352597	8.280581	-4.448172
+10.382578	5.523170	-6.088322
+10.536784	7.003312	-5.863840
+-4.830463	6.965111	-9.343669
+8.075752	7.434647	10.812739
+12.239098	8.815696	-2.012262
+12.167772	8.975361	-2.286049
+6.465675	6.121817	-9.615920
+12.416100	6.175255	-1.177539
+12.328762	7.164798	3.633171
+-14.142083	5.971424	5.875845
+4.564249	7.545279	13.023433
+11.749210	5.381868	5.587850
+11.204405	7.065555	-4.740796
+-6.365646	7.854759	-8.137804
+-2.950412	5.811823	14.037624
+3.898303	7.728798	-10.655592
+11.837867	7.647293	5.350478
+10.154785	6.543691	8.535179
+2.637704	6.611702	-10.918522
+-5.071965	7.750295	-9.183445
+7.608727	6.840312	11.200500
+12.086434	7.716573	-2.570719
+7.833564	6.971144	11.018254
+7.227764	6.702410	-9.154557
+12.353757	6.196330	3.512425
+11.519611	9.276255	-4.092596
+12.561339	7.555327	-0.061327
+9.993982	8.255218	8.753894
+12.602398	6.957977	0.698539
+1.206998	8.206717	-11.038692
+9.068106	7.843693	-7.652334
+-2.944341	8.058730	14.038463
+12.068024	6.732886	-2.631763
+11.919195	8.530890	-3.088458
+12.605272	7.390704	1.125837
+-3.088801	6.941105	14.017777
+-7.149141	6.443641	-7.304602
+8.799339	8.027759	-7.912921
+-2.858662	7.329884	-10.337826
+12.557043	6.477845	-0.110975
+10.352265	7.967540	-6.131195
+8.907776	8.201834	10.024896
+10.261865	6.202061	-6.256736
+12.474163	8.564901	2.818297
+11.393506	6.564949	-4.363787
+9.039207	7.447365	-7.681156
+12.133834	8.010927	4.427035
+-8.714766	7.360652	11.922957
+12.079281	6.114276	4.616584
+-8.204597	8.298765	-5.784814
+1.651712	8.205140	-11.021253
+-1.083541	8.179799	-10.841956
+-0.120898	6.655204	14.145197
+12.601406	7.663968	1.334745
+10.070922	6.900444	8.650417
+6.105740	7.164763	-9.807074
+12.302969	7.309710	-1.742139
+10.524453	9.410295	-5.882197
+4.216486	7.773072	-10.564980
+-1.096785	8.498021	-10.839385
+9.622041	8.455131	-7.058593
+12.513221	8.262786	2.525405
+9.934394	7.663393	8.832613
+9.324376	6.022754	-7.387754
+-2.986244	7.293205	-10.288466
+9.789091	8.265431	-6.862540
+6.036937	7.172428	12.276046
+8.629953	7.134370	10.303165
+3.874619	5.555624	-10.661937
+12.253143	8.583001	-1.955124
+7.038052	8.301483	-9.276981
+11.180540	7.371983	6.858659
+11.662423	6.351946	-3.762420
+12.601848	8.279064	0.676144
+11.619500	6.735740	-3.864502
+9.613063	8.880852	9.237253
+10.533295	8.348035	-5.869041
+12.328526	5.744434	-1.625756
+5.381018	5.523841	-10.144469
+11.774699	11.182868	-3.482248
+11.742660	7.163888	-3.564266
+2.239191	7.885492	-10.970730
+-1.338280	7.890723	14.168280
+-4.848380	6.971962	-9.332111
+3.801503	7.325862	-10.681176
+9.861624	9.074855	8.927122
+6.149170	8.121749	-9.784869
+8.635898	8.199501	10.297380
+11.684111	8.427819	-3.709831
+2.738467	8.709106	-10.903004
+-0.873876	7.096826	-10.880393
+7.062165	6.830113	11.611816
+11.168130	7.174346	6.882779
+12.014050	7.026136	4.830075
+12.599734	7.267940	1.392283
+1.636717	8.173922	-11.022132
+5.878658	6.877892	12.367368
+11.114161	6.372688	6.986270
+4.156975	7.793698	-10.582694
+8.890907	8.117092	-7.825987
+11.038549	9.213902	-5.046824
+9.588704	10.278598	-7.096707
+-1.596210	8.130804	-10.729814
+-7.818001	6.802782	-6.413011
+10.883703	7.123479	-5.314882
+-2.879242	9.409180	-10.329991
+11.520928	8.247060	-4.089671
+-6.559795	7.054917	13.046762
+9.925937	8.487898	-6.695312
+7.325962	8.729254	11.418696
+11.627454	8.656857	5.892801
+9.505715	7.281099	-7.190184
+8.559530	6.920913	10.371162
+12.312234	6.971117	3.709964
+12.457532	8.181591	-0.927779
+4.875251	7.450266	12.883870
+11.337071	6.822165	6.543266
+-15.107192	9.753317	3.405405
+11.051246	7.690311	-5.024123
+11.647473	7.677173	-3.798270
+9.975793	9.289726	8.778052
+5.456009	10.889844	-10.112374
+11.615572	8.522256	5.921407
+8.908507	6.880747	10.024143
+10.890256	6.658959	-5.303852
+5.758559	8.533858	12.434796
+-4.763545	6.643364	13.666152
+2.011565	7.365296	-10.994029
+11.804479	8.417304	5.441542
+11.560636	6.134197	6.051229
+12.587921	6.384390	0.323744
+5.848055	8.313186	-9.934111
+-0.892825	5.809355	14.171987
+6.452808	7.933476	12.022287
+9.352387	7.723934	9.543182
+5.059926	8.119093	12.796556
+0.095258	6.720846	14.130104
+0.076211	7.247495	-11.002237
+8.820244	7.630691	-7.893237
+8.672126	9.678521	-8.030658
+7.863370	7.756448	-8.704382
+4.866293	8.145004	-10.348036
+11.133804	7.431459	-4.873642
+8.916412	8.737880	-7.801439
+4.386719	7.657166	13.098998
+12.598787	6.559614	0.572708
+10.634473	7.181063	7.820486
+-6.966182	7.771359	12.868818
+5.155142	7.331320	-10.237358
+11.914538	8.017058	5.132875
+9.741015	7.672185	-6.919849
+9.346666	8.207235	9.549690
+-0.517815	7.272222	-10.936008
+11.573480	7.901590	6.021224
+6.725729	8.468981	-9.467427
+11.543897	7.778207	-4.038294
+12.019814	6.125213	4.811731
+0.714647	7.412779	14.068272
+11.961952	7.641765	-2.963300
+12.043830	7.602178	4.734247
+11.001840	8.292581	7.194740
+-1.420704	6.634401	-10.771141
+-8.377614	8.070473	-5.465562
+6.837346	6.973116	-9.400915
+10.851725	7.145330	7.460083
+11.839520	8.157787	-3.310723
+-0.045765	6.816529	14.140329
+7.767068	6.716549	-8.776756
+11.143391	7.994854	-4.855834
+4.686888	6.970783	12.969503
+9.536979	5.656365	9.328494
+9.921247	6.053596	-6.701148
+10.043345	8.633219	-6.546758
+12.380600	8.376366	3.375698
+12.424267	8.227657	-1.130708
+8.834452	8.358028	-7.879805
+3.767730	7.730299	-10.689886
+6.663138	5.369636	-9.503986
+1.190215	6.223768	-11.039000
+12.165972	6.949678	4.309940
+1.518233	8.460718	-11.028366
+3.591034	9.063961	-10.733651
+1.079981	6.817149	14.018742
+-7.273814	7.706763	12.724155
+-9.052916	6.963534	11.702736
+8.996027	7.734946	-7.723853
+1.092803	7.368163	-11.040283
+4.363196	4.346755	13.108791
+0.690027	6.923059	-11.036465
+12.587260	8.779056	1.683240
+6.634957	7.657954	11.904573
+11.975506	8.589762	4.950373
+2.110904	7.448938	-10.984442
+11.540994	9.381644	-4.044822
+6.970112	7.780697	11.676939
+12.388652	6.189067	3.333082
+12.605075	6.378969	0.849993
+10.978084	8.772089	-5.153393
+12.585490	6.986055	1.715030
+2.751024	7.671819	13.664314
+12.537121	8.303021	-0.316649
+1.308415	6.876874	13.982782
+10.865184	9.460500	7.436870
+11.687053	7.223537	5.746323
+10.069951	7.900564	-6.512411
+7.175601	8.364393	-9.188740
+11.678670	7.591856	-3.723091
+12.461858	8.089101	-0.899739
+8.520515	7.664004	10.408408
+2.876256	8.835012	13.628983
+11.474377	8.312249	-4.191893
+0.647410	6.436073	-11.035200
+5.091069	9.039446	-10.262686
+-4.827053	7.633425	13.648665
+8.529233	6.425375	-8.158851
+-0.076436	7.897815	-10.988352
+11.858333	7.899197	-3.259440
+12.550631	6.787005	-0.181148
+12.275964	6.198066	-1.859672
+-7.551833	7.304432	-6.792690
+4.947653	6.204706	-10.317768
+11.973761	9.058357	-2.927937
+-5.622987	6.496857	-8.779646
+1.194783	6.917019	14.001153
+1.335675	6.229102	-11.035485
+9.038580	6.474315	-7.681779
+8.394513	8.804027	-8.275935
+8.215303	6.809095	10.689756
+11.509541	8.333502	6.168581
+12.210172	9.292370	-2.126418
+10.330403	8.084809	-6.161870
+12.566178	6.934155	-0.002417
+4.231479	7.393143	13.162688
+12.318176	6.529940	3.682619
+9.836524	9.706331	-6.805275
+6.608478	7.026045	11.921942
+10.630705	7.832623	-5.721568
+10.174660	5.118122	-6.374681
+12.473536	7.750357	-0.821830
+-12.668798	7.491675	8.270793
+-12.681141	7.181448	8.254342
+12.602678	7.825215	0.710613
+8.643159	7.682691	10.290303
+12.517936	6.741289	-0.487679
+12.503627	6.777442	-0.603181
+11.410904	7.516606	-4.327390
+1.230476	6.764797	-11.038219
+-2.530176	7.028375	-10.456400
+4.307273	7.006116	-10.537270
+11.290951	6.929425	6.638435
+0.579190	5.712850	14.084162
+11.450532	6.135391	6.300325
+-2.573520	6.860170	-10.441445
+11.388337	8.839326	-4.374542
+-0.954032	7.822137	-10.866200
+3.058484	7.543379	13.575299
+11.406602	8.985942	-4.336418
+3.806238	7.834165	-10.679946
+10.352962	8.041787	8.252019
+12.594873	6.920176	0.469262
+6.846841	8.138366	11.762367
+11.060773	6.515609	7.086486
+8.213095	8.047046	-8.428060
+9.757162	6.369705	-6.900682
+4.425773	8.537267	-10.499847
+8.500542	6.321640	10.427360
+3.301702	6.389957	13.499391
+5.278767	7.039769	-10.187218
+12.323542	7.817334	-1.648879
+6.816350	13.010634	-9.413556
+2.257864	11.652705	-10.968609
+10.024868	13.372918	-6.570460
+5.268847	12.710487	12.693676
+-7.648990	14.390018	12.535679
+7.442461	11.185614	11.330241
+10.861154	11.536425	-5.352630
+9.757610	11.431015	-6.900149
+12.539733	12.559690	-0.291576
+10.518617	13.486570	-5.890861
+6.102679	14.897688	12.237279
+6.818181	12.739468	-9.412456
+11.082247	12.576736	7.046426
+-4.953760	11.477301	-9.263072
+11.370186	12.719958	-4.412093
+0.291620	12.105859	14.113495
+10.359657	12.809526	-6.120777
+11.829601	12.905074	-3.337479
+10.795345	13.957346	-5.461048
+1.295528	12.272009	-11.036647
+10.503142	12.260357	8.026400
+12.041934	11.407420	-2.716374
+10.557027	11.828163	-5.833544
+-2.013150	12.391000	14.135999
+-2.751254	13.552213	-10.377934
+7.954050	12.971966	10.917240
+-0.547994	14.528935	14.165238
+12.026954	13.502657	-2.764003
+6.831490	11.947686	-9.404447
+-0.905839	13.302637	14.172077
+6.037660	13.315730	-9.841417
+12.539100	11.968838	2.295000
+5.802316	12.411208	-9.955829
+-2.794968	12.167921	14.058275
+12.595325	12.156981	1.514184
+11.348187	10.741401	6.520029
+11.414030	11.153702	-4.320817
+-0.991859	11.282232	14.172373
+12.042261	11.510797	4.739361
+12.541196	12.940338	-0.277318
+12.602103	13.545561	0.686324
+9.928511	11.325702	-6.692107
+12.509411	11.377441	2.556429
+11.241896	11.205101	-4.668596
+12.605887	10.807339	1.030159
+-15.708033	11.798480	-1.996230
+9.942177	12.886460	-6.675051
+8.102531	12.778732	-8.517768
+8.934461	13.118438	9.997292
+10.080695	11.672861	-6.498469
+11.055420	10.979794	-5.016635
+-8.458384	12.854959	-5.306337
+11.016262	12.748333	7.168472
+2.052397	12.626166	-10.990198
+12.070482	11.433381	4.646170
+11.302940	10.815754	6.613883
+10.239128	13.784510	8.416588
+11.707213	11.737680	-3.653036
+6.226668	11.325156	-9.744666
+11.921001	12.866313	5.113951
+6.548397	12.889001	11.961026
+3.072722	12.203996	-10.844769
+-3.092174	12.500619	-10.246044
+4.408490	11.850078	-10.505394
+8.133011	10.726120	-8.493260
+0.166699	12.214470	14.124382
+-9.422538	13.360862	-2.013800
+11.745099	11.950619	-3.558083
+11.270807	11.691360	-4.612097
+9.553701	13.403114	-7.136375
+8.033626	11.611145	-8.572562
+5.512306	11.541071	-10.087862
+4.805144	11.788611	12.916142
+12.032583	10.748820	4.770748
+7.400781	12.176786	-9.038270
+8.638421	14.775748	-8.061276
+8.608536	12.492453	-8.088225
+10.466820	13.145324	8.081896
+12.310793	12.713380	-1.707071
+12.553119	14.450536	2.150992
+9.044662	13.863294	-7.675731
+6.049073	12.224750	-9.835699
+6.308746	12.163116	12.112509
+11.801321	12.980314	-3.412737
+7.796368	12.046487	11.048959
+12.034987	12.044517	4.762978
+7.685738	13.342734	-8.836680
+4.438912	11.306218	13.077087
+12.602825	13.429962	1.276818
+10.299320	11.764462	8.330233
+6.810508	11.533278	-9.417063
+12.097073	11.420490	-2.534911
+12.397625	11.283920	3.284623
+12.594208	12.992940	1.540732
+12.265588	12.302767	-1.903488
+6.218907	13.352670	-9.748726
+9.823865	12.483501	8.975476
+8.821288	11.272393	-7.892252
+8.890174	12.198831	-7.826691
+11.012217	13.875882	-5.093544
+9.175404	12.335714	9.740652
+10.354194	11.896905	8.250208
+9.825917	13.183954	-6.818144
+12.605451	13.080329	1.106919
+10.222061	14.161065	-6.310945
+4.795493	12.229645	-10.373803
+11.312162	12.446728	-4.529985
+9.148875	11.966735	9.769580
+12.572241	10.591872	1.918801
+12.523526	13.636959	-0.439984
+-2.377144	14.246648	14.105221
+1.891963	13.138079	-11.004381
+9.572581	12.484909	-7.115023
+7.203831	11.524118	-9.170290
+1.215021	11.916613	-11.038536
+5.078032	12.699360	12.787813
+2.857864	12.100193	13.634252
+10.627966	11.916266	-5.725780
+-3.223613	12.470251	-10.191562
+2.656617	14.173930	13.690114
+7.722110	13.614281	-8.810016
+7.084823	14.231250	11.595610
+6.639163	12.587786	11.901806
+12.461683	12.443429	-0.900881
+-3.201935	14.196611	-10.200690
+12.571885	13.312754	0.071914
+11.694428	11.472622	5.727836
+-2.865375	14.017477	14.049136
+11.515710	12.461427	-4.101255
+12.349324	12.775042	-1.526859
+9.435628	13.653247	9.447511
+12.489510	9.970717	-0.709334
+10.752938	11.917662	7.627205
+1.509901	10.585991	13.947846
+12.573589	12.797583	0.095310
+6.431959	11.849570	12.035500
+12.364235	11.613482	3.459987
+11.995984	11.839635	-2.860398
+11.561106	11.746274	6.050134
+10.202932	11.574616	-6.336773
+8.668443	13.876106	10.265578
+12.067683	14.660040	-2.632884
+11.776051	12.477936	5.517463
+-5.792090	13.230049	-8.644095
+9.512349	12.348706	9.357678
+5.731143	14.105920	-9.989136
+6.372725	14.020783	-9.666853
+12.481504	11.511436	2.766999
+5.164611	13.948339	12.745556
+6.211802	12.918318	12.171827
+12.361837	13.506064	-1.465344
+5.710258	13.650101	12.461467
+-8.318645	12.802142	12.164631
+8.571070	12.980166	-8.121752
+11.884710	12.428090	-3.186317
+3.929851	13.009531	-10.647056
+10.174938	11.712964	-6.374309
+7.045846	11.371670	-9.272054
+5.486601	12.413141	-10.099098
+0.559375	11.929560	14.086375
+7.449142	12.459329	11.325107
+5.566381	12.872991	-10.063979
+12.286527	13.118911	-1.814320
+2.916571	11.257396	13.617340
+12.374604	12.621340	3.406936
+3.833434	12.283449	13.316062
+-1.231762	11.409338	-10.812203
+12.210339	12.747229	4.140668
+11.374602	13.289511	6.464335
+7.893535	13.758496	-8.681391
+0.792088	14.361978	14.058588
+11.825154	12.368874	5.385400
+-6.611497	13.245930	-7.896008
+1.038591	12.866272	-11.040626
+5.359629	13.396163	12.647587
+12.524565	12.821981	-0.430942
+5.654192	13.249748	12.492109
+12.224656	13.369546	-2.069830
+12.012856	13.307606	4.833863
+1.208399	13.554493	-11.038665
+9.779401	13.035889	-6.874150
+11.213722	12.968358	-4.722963
+6.578263	13.124704	11.941654
+10.379418	13.508596	-6.092810
+12.600783	12.577681	0.636792
+12.496548	12.115808	-0.657283
+8.943514	11.012765	-7.775191
+6.393159	11.448999	12.059948
+3.169164	12.720775	13.541364
+12.460552	14.052397	2.909725
+11.413797	13.785224	6.380418
+10.479274	13.094805	8.062938
+2.585194	11.950007	-10.926237
+5.179030	13.886467	12.738445
+-4.847286	10.088201	-9.332818
+5.492716	12.821335	-10.096432
+9.484899	12.988775	-7.213322
+12.206100	12.534476	4.157254
+12.386123	12.607700	3.346550
+9.470117	12.896677	-7.229680
+1.986997	13.149403	-10.996262
+11.484164	12.064795	6.225717
+9.003560	12.958334	-7.716435
+11.230558	14.423715	6.760175
+11.719316	13.339674	5.664829
+5.206505	12.015858	12.724837
+4.546260	13.496732	-10.460326
+7.824019	12.647525	-8.734144
+12.296771	13.504773	3.779817
+4.297477	11.727517	-10.540300
+8.769399	10.921765	-7.940944
+12.273029	11.992707	-1.872138
+11.440961	11.379585	-4.263766
+10.176717	12.068973	8.504606
+8.463815	12.436811	-8.216154
+11.757262	15.014985	5.566863
+9.375816	12.689328	-7.332615
+5.909809	11.834022	-9.904393
+-0.335466	12.868908	14.156885
+12.243396	12.320796	-1.994903
+12.018828	13.358858	-2.789560
+9.189282	14.075697	-7.529279
+10.763486	14.288849	-5.512625
+7.484766	12.003201	11.297624
+8.313569	13.434181	-8.344581
+6.614144	13.366956	-9.532238
+10.764833	13.001663	-5.510458
+-0.913614	13.054739	-10.873434
+12.426540	10.750134	3.120766
+8.796618	11.406992	-7.915475
+-8.338705	12.009549	12.152801
+9.746059	10.644060	9.073682
+5.245496	12.543541	-10.200878
+-5.216040	11.922802	13.534704
+12.507921	11.303995	-0.569430
+8.999698	11.851172	-7.720239
+7.999887	11.098867	-8.599084
+12.604435	11.896056	0.802713
+12.309317	12.407041	-1.713724
+9.091001	13.103821	9.832103
+12.578598	13.623783	0.167905
+12.605773	12.973943	0.933723
+3.006113	12.329530	-10.857207
+7.456406	13.072589	-8.999918
+10.497436	12.982610	-5.922165
+-0.006259	13.324285	14.137608
+-2.849774	12.717538	-10.341195
+5.122999	11.719644	-10.250120
+10.748490	12.534318	-5.536704
+11.613397	12.555988	-3.878809
+11.009435	10.958159	-5.098451
+1.519280	12.784584	-11.028317
+-11.360942	13.001116	9.789335
+7.702289	12.774521	11.125631
+-4.811628	11.626781	-9.355764
+12.182510	14.877548	-2.231530
+-2.373355	11.002604	14.105589
+11.731531	12.732347	5.633549
+12.315108	11.833380	3.696773
+11.606759	12.487041	5.942500
+12.165032	10.353026	-2.296078
+5.922907	13.682240	-9.898032
+10.132602	14.255898	8.565916
+-9.535315	12.455212	11.364959
+0.694441	13.177736	-11.036587
+12.445946	12.303132	3.003127
+11.377638	13.586940	6.457891
+7.984023	13.285935	10.891737
+12.597578	11.793295	0.538273
+10.676806	12.012207	7.752274
+-6.659653	14.462967	13.004399
+12.605892	13.361458	0.965502
+1.196684	13.353240	-11.038884
+-2.348789	12.394022	-10.516747
+7.623316	13.919811	-8.881942
+10.969680	14.792566	7.252812
+10.985844	9.951648	7.223712
+10.009515	13.349031	8.733173
+10.859816	12.378512	-5.354860
+-1.343427	12.780648	-10.788363
+7.629084	13.132946	11.184326
+12.573159	12.563987	1.906156
+10.228458	13.872663	-6.302276
+8.862402	11.989036	10.071472
+11.042450	11.174950	7.120405
+9.611716	12.667233	-7.070433
+11.435302	10.461704	6.333704
+12.507807	10.900643	2.569299
+12.457351	11.110768	-0.928945
+-6.162348	11.854508	-8.326001
+5.336361	13.401175	-10.163283
+9.354740	11.414215	-7.355292
+6.723762	11.572013	11.845674
+12.591959	13.412603	0.403892
+12.578839	13.500807	1.823640
+8.058742	11.480587	-8.552688
+2.518108	13.533819	-10.935723
+11.317532	13.155457	6.583824
+10.954981	12.570638	7.279126
+8.754584	11.017792	-7.954738
+7.594358	11.167188	-8.902726
+7.799488	11.127066	-8.752565
+11.618796	13.341903	-3.866156
+8.449134	11.947829	10.475784
+12.556698	12.974315	2.111241
+1.479085	11.899375	-11.030147
+12.567905	11.801877	0.019475
+7.194893	11.643279	11.515872
+12.464745	11.370067	2.882043
+11.490820	13.273288	6.210803
+6.042864	12.211185	12.272572
+10.882708	13.492933	-5.316554
+11.987782	11.876503	-2.885477
+11.947972	12.593412	-3.004712
+12.490590	12.422387	-0.701453
+12.157602	11.169810	-2.323104
+12.581520	12.124597	0.213393
+6.023082	11.662650	12.284153
+2.879205	13.263030	-10.879754
+7.738969	12.845647	-8.797583
+9.562994	11.629199	-7.125878
+10.923424	12.459630	-5.247616
+7.238147	12.918732	-9.147704
+0.701508	11.476862	-11.036778
+11.972887	12.732145	-2.930565
+10.041759	12.371146	-6.548798
+12.516962	12.507152	-0.495828
+10.494884	12.155889	-5.925923
+12.458969	12.129027	2.920068
+6.929190	10.921235	-9.344902
+3.280984	13.624737	-10.803188
+12.451864	11.441014	-0.963893
+5.860639	13.207941	12.377587
+5.831496	10.374859	-9.942002
+12.299420	11.651606	-1.757889
+9.875874	10.482600	8.908753
+11.285587	14.196296	-4.582928
+9.302950	13.222287	9.599141
+11.976094	10.925272	-2.920908
+3.225604	11.425352	13.523669
+-6.657874	11.786150	-7.848530
+1.812385	13.490156	-11.010549
+11.438219	12.407394	-4.269611
+11.851548	13.250619	5.312542
+8.254149	12.912385	10.654906
+7.655285	12.787032	11.163414
+4.965419	12.641398	12.841658
+3.861025	12.735867	13.305884
+9.688245	12.490498	9.145440
+12.399469	12.610112	3.274533
+7.573658	13.352368	-8.917501
+10.485309	13.939863	8.053724
+10.550176	12.944262	7.953620
+-1.619197	11.458821	-10.724171
+12.353292	12.051927	-1.507523
+12.352157	12.983628	3.520332
+12.071890	12.920490	4.641454
+4.737487	13.452414	-10.394520
+10.399206	12.404573	-6.064635
+11.392349	12.432645	6.426532
+12.480185	12.728288	2.776325
+11.565229	13.740171	-3.989998
+12.145384	13.470776	-2.367033
+12.477754	13.225845	-0.792832
+-3.348618	11.879689	-10.137822
+12.318879	11.227971	3.679364
+4.231117	11.635667	13.162834
+-5.480287	12.160751	-8.889617
+12.591062	12.795535	0.385167
+12.457524	12.755093	-0.927828
+10.223449	12.351174	8.438844
+9.593354	13.172997	-7.091410
+11.575734	12.112569	-3.966005
+12.152331	11.443093	4.360170
+12.348663	14.256436	-1.530064
+6.847284	11.194604	11.762064
+4.073817	14.546925	13.225130
+10.824423	12.162309	-5.413460
+11.865408	13.033303	5.273727
+10.140920	11.969731	-6.419514
+5.192349	11.128866	12.731858
+-3.407399	12.633173	-10.111893
+8.587361	12.673915	-8.107210
+-15.338441	12.281832	2.524663
+11.303333	13.162835	6.613076
+9.755416	12.628960	9.061971
+8.905650	11.418342	10.027089
+11.578258	12.843210	-3.960219
+10.973124	12.361134	-5.162023
+12.531230	12.972506	2.369204
+12.381398	12.228963	3.371510
+12.186071	12.875782	-2.218209
+-4.358588	12.063279	-9.630134
+8.443310	12.382025	10.481237
+9.553932	11.246692	-7.136115
+8.503806	13.110051	10.424269
+12.406159	13.831786	-1.233169
+12.440832	12.176057	-1.032280
+9.612878	11.898241	-7.069101
+-0.945664	11.501216	-10.867711
\ No newline at end of file
diff --git a/src/test/resources/logistic_regression_data.txt b/src/test/resources/logistic_regression_data.txt
new file mode 100644
index 0000000..b874bd8
--- /dev/null
+++ b/src/test/resources/logistic_regression_data.txt
@@ -0,0 +1,768 @@
+6,148,72,35,0,33.6,0.627,50,1
+1,85,66,29,0,26.6,0.351,31,0
+8,183,64,0,0,23.3,0.672,32,1
+1,89,66,23,94,28.1,0.167,21,0
+0,137,40,35,168,43.1,2.288,33,1
+5,116,74,0,0,25.6,0.201,30,0
+3,78,50,32,88,31,0.248,26,1
+10,115,0,0,0,35.3,0.134,29,0
+2,197,70,45,543,30.5,0.158,53,1
+8,125,96,0,0,0,0.232,54,1
+4,110,92,0,0,37.6,0.191,30,0
+10,168,74,0,0,38,0.537,34,1
+10,139,80,0,0,27.1,1.441,57,0
+1,189,60,23,846,30.1,0.398,59,1
+5,166,72,19,175,25.8,0.587,51,1
+7,100,0,0,0,30,0.484,32,1
+0,118,84,47,230,45.8,0.551,31,1
+7,107,74,0,0,29.6,0.254,31,1
+1,103,30,38,83,43.3,0.183,33,0
+1,115,70,30,96,34.6,0.529,32,1
+3,126,88,41,235,39.3,0.704,27,0
+8,99,84,0,0,35.4,0.388,50,0
+7,196,90,0,0,39.8,0.451,41,1
+9,119,80,35,0,29,0.263,29,1
+11,143,94,33,146,36.6,0.254,51,1
+10,125,70,26,115,31.1,0.205,41,1
+7,147,76,0,0,39.4,0.257,43,1
+1,97,66,15,140,23.2,0.487,22,0
+13,145,82,19,110,22.2,0.245,57,0
+5,117,92,0,0,34.1,0.337,38,0
+5,109,75,26,0,36,0.546,60,0
+3,158,76,36,245,31.6,0.851,28,1
+3,88,58,11,54,24.8,0.267,22,0
+6,92,92,0,0,19.9,0.188,28,0
+10,122,78,31,0,27.6,0.512,45,0
+4,103,60,33,192,24,0.966,33,0
+11,138,76,0,0,33.2,0.42,35,0
+9,102,76,37,0,32.9,0.665,46,1
+2,90,68,42,0,38.2,0.503,27,1
+4,111,72,47,207,37.1,1.39,56,1
+3,180,64,25,70,34,0.271,26,0
+7,133,84,0,0,40.2,0.696,37,0
+7,106,92,18,0,22.7,0.235,48,0
+9,171,110,24,240,45.4,0.721,54,1
+7,159,64,0,0,27.4,0.294,40,0
+0,180,66,39,0,42,1.893,25,1
+1,146,56,0,0,29.7,0.564,29,0
+2,71,70,27,0,28,0.586,22,0
+7,103,66,32,0,39.1,0.344,31,1
+7,105,0,0,0,0,0.305,24,0
+1,103,80,11,82,19.4,0.491,22,0
+1,101,50,15,36,24.2,0.526,26,0
+5,88,66,21,23,24.4,0.342,30,0
+8,176,90,34,300,33.7,0.467,58,1
+7,150,66,42,342,34.7,0.718,42,0
+1,73,50,10,0,23,0.248,21,0
+7,187,68,39,304,37.7,0.254,41,1
+0,100,88,60,110,46.8,0.962,31,0
+0,146,82,0,0,40.5,1.781,44,0
+0,105,64,41,142,41.5,0.173,22,0
+2,84,0,0,0,0,0.304,21,0
+8,133,72,0,0,32.9,0.27,39,1
+5,44,62,0,0,25,0.587,36,0
+2,141,58,34,128,25.4,0.699,24,0
+7,114,66,0,0,32.8,0.258,42,1
+5,99,74,27,0,29,0.203,32,0
+0,109,88,30,0,32.5,0.855,38,1
+2,109,92,0,0,42.7,0.845,54,0
+1,95,66,13,38,19.6,0.334,25,0
+4,146,85,27,100,28.9,0.189,27,0
+2,100,66,20,90,32.9,0.867,28,1
+5,139,64,35,140,28.6,0.411,26,0
+13,126,90,0,0,43.4,0.583,42,1
+4,129,86,20,270,35.1,0.231,23,0
+1,79,75,30,0,32,0.396,22,0
+1,0,48,20,0,24.7,0.14,22,0
+7,62,78,0,0,32.6,0.391,41,0
+5,95,72,33,0,37.7,0.37,27,0
+0,131,0,0,0,43.2,0.27,26,1
+2,112,66,22,0,25,0.307,24,0
+3,113,44,13,0,22.4,0.14,22,0
+2,74,0,0,0,0,0.102,22,0
+7,83,78,26,71,29.3,0.767,36,0
+0,101,65,28,0,24.6,0.237,22,0
+5,137,108,0,0,48.8,0.227,37,1
+2,110,74,29,125,32.4,0.698,27,0
+13,106,72,54,0,36.6,0.178,45,0
+2,100,68,25,71,38.5,0.324,26,0
+15,136,70,32,110,37.1,0.153,43,1
+1,107,68,19,0,26.5,0.165,24,0
+1,80,55,0,0,19.1,0.258,21,0
+4,123,80,15,176,32,0.443,34,0
+7,81,78,40,48,46.7,0.261,42,0
+4,134,72,0,0,23.8,0.277,60,1
+2,142,82,18,64,24.7,0.761,21,0
+6,144,72,27,228,33.9,0.255,40,0
+2,92,62,28,0,31.6,0.13,24,0
+1,71,48,18,76,20.4,0.323,22,0
+6,93,50,30,64,28.7,0.356,23,0
+1,122,90,51,220,49.7,0.325,31,1
+1,163,72,0,0,39,1.222,33,1
+1,151,60,0,0,26.1,0.179,22,0
+0,125,96,0,0,22.5,0.262,21,0
+1,81,72,18,40,26.6,0.283,24,0
+2,85,65,0,0,39.6,0.93,27,0
+1,126,56,29,152,28.7,0.801,21,0
+1,96,122,0,0,22.4,0.207,27,0
+4,144,58,28,140,29.5,0.287,37,0
+3,83,58,31,18,34.3,0.336,25,0
+0,95,85,25,36,37.4,0.247,24,1
+3,171,72,33,135,33.3,0.199,24,1
+8,155,62,26,495,34,0.543,46,1
+1,89,76,34,37,31.2,0.192,23,0
+4,76,62,0,0,34,0.391,25,0
+7,160,54,32,175,30.5,0.588,39,1
+4,146,92,0,0,31.2,0.539,61,1
+5,124,74,0,0,34,0.22,38,1
+5,78,48,0,0,33.7,0.654,25,0
+4,97,60,23,0,28.2,0.443,22,0
+4,99,76,15,51,23.2,0.223,21,0
+0,162,76,56,100,53.2,0.759,25,1
+6,111,64,39,0,34.2,0.26,24,0
+2,107,74,30,100,33.6,0.404,23,0
+5,132,80,0,0,26.8,0.186,69,0
+0,113,76,0,0,33.3,0.278,23,1
+1,88,30,42,99,55,0.496,26,1
+3,120,70,30,135,42.9,0.452,30,0
+1,118,58,36,94,33.3,0.261,23,0
+1,117,88,24,145,34.5,0.403,40,1
+0,105,84,0,0,27.9,0.741,62,1
+4,173,70,14,168,29.7,0.361,33,1
+9,122,56,0,0,33.3,1.114,33,1
+3,170,64,37,225,34.5,0.356,30,1
+8,84,74,31,0,38.3,0.457,39,0
+2,96,68,13,49,21.1,0.647,26,0
+2,125,60,20,140,33.8,0.088,31,0
+0,100,70,26,50,30.8,0.597,21,0
+0,93,60,25,92,28.7,0.532,22,0
+0,129,80,0,0,31.2,0.703,29,0
+5,105,72,29,325,36.9,0.159,28,0
+3,128,78,0,0,21.1,0.268,55,0
+5,106,82,30,0,39.5,0.286,38,0
+2,108,52,26,63,32.5,0.318,22,0
+10,108,66,0,0,32.4,0.272,42,1
+4,154,62,31,284,32.8,0.237,23,0
+0,102,75,23,0,0,0.572,21,0
+9,57,80,37,0,32.8,0.096,41,0
+2,106,64,35,119,30.5,1.4,34,0
+5,147,78,0,0,33.7,0.218,65,0
+2,90,70,17,0,27.3,0.085,22,0
+1,136,74,50,204,37.4,0.399,24,0
+4,114,65,0,0,21.9,0.432,37,0
+9,156,86,28,155,34.3,1.189,42,1
+1,153,82,42,485,40.6,0.687,23,0
+8,188,78,0,0,47.9,0.137,43,1
+7,152,88,44,0,50,0.337,36,1
+2,99,52,15,94,24.6,0.637,21,0
+1,109,56,21,135,25.2,0.833,23,0
+2,88,74,19,53,29,0.229,22,0
+17,163,72,41,114,40.9,0.817,47,1
+4,151,90,38,0,29.7,0.294,36,0
+7,102,74,40,105,37.2,0.204,45,0
+0,114,80,34,285,44.2,0.167,27,0
+2,100,64,23,0,29.7,0.368,21,0
+0,131,88,0,0,31.6,0.743,32,1
+6,104,74,18,156,29.9,0.722,41,1
+3,148,66,25,0,32.5,0.256,22,0
+4,120,68,0,0,29.6,0.709,34,0
+4,110,66,0,0,31.9,0.471,29,0
+3,111,90,12,78,28.4,0.495,29,0
+6,102,82,0,0,30.8,0.18,36,1
+6,134,70,23,130,35.4,0.542,29,1
+2,87,0,23,0,28.9,0.773,25,0
+1,79,60,42,48,43.5,0.678,23,0
+2,75,64,24,55,29.7,0.37,33,0
+8,179,72,42,130,32.7,0.719,36,1
+6,85,78,0,0,31.2,0.382,42,0
+0,129,110,46,130,67.1,0.319,26,1
+5,143,78,0,0,45,0.19,47,0
+5,130,82,0,0,39.1,0.956,37,1
+6,87,80,0,0,23.2,0.084,32,0
+0,119,64,18,92,34.9,0.725,23,0
+1,0,74,20,23,27.7,0.299,21,0
+5,73,60,0,0,26.8,0.268,27,0
+4,141,74,0,0,27.6,0.244,40,0
+7,194,68,28,0,35.9,0.745,41,1
+8,181,68,36,495,30.1,0.615,60,1
+1,128,98,41,58,32,1.321,33,1
+8,109,76,39,114,27.9,0.64,31,1
+5,139,80,35,160,31.6,0.361,25,1
+3,111,62,0,0,22.6,0.142,21,0
+9,123,70,44,94,33.1,0.374,40,0
+7,159,66,0,0,30.4,0.383,36,1
+11,135,0,0,0,52.3,0.578,40,1
+8,85,55,20,0,24.4,0.136,42,0
+5,158,84,41,210,39.4,0.395,29,1
+1,105,58,0,0,24.3,0.187,21,0
+3,107,62,13,48,22.9,0.678,23,1
+4,109,64,44,99,34.8,0.905,26,1
+4,148,60,27,318,30.9,0.15,29,1
+0,113,80,16,0,31,0.874,21,0
+1,138,82,0,0,40.1,0.236,28,0
+0,108,68,20,0,27.3,0.787,32,0
+2,99,70,16,44,20.4,0.235,27,0
+6,103,72,32,190,37.7,0.324,55,0
+5,111,72,28,0,23.9,0.407,27,0
+8,196,76,29,280,37.5,0.605,57,1
+5,162,104,0,0,37.7,0.151,52,1
+1,96,64,27,87,33.2,0.289,21,0
+7,184,84,33,0,35.5,0.355,41,1
+2,81,60,22,0,27.7,0.29,25,0
+0,147,85,54,0,42.8,0.375,24,0
+7,179,95,31,0,34.2,0.164,60,0
+0,140,65,26,130,42.6,0.431,24,1
+9,112,82,32,175,34.2,0.26,36,1
+12,151,70,40,271,41.8,0.742,38,1
+5,109,62,41,129,35.8,0.514,25,1
+6,125,68,30,120,30,0.464,32,0
+5,85,74,22,0,29,1.224,32,1
+5,112,66,0,0,37.8,0.261,41,1
+0,177,60,29,478,34.6,1.072,21,1
+2,158,90,0,0,31.6,0.805,66,1
+7,119,0,0,0,25.2,0.209,37,0
+7,142,60,33,190,28.8,0.687,61,0
+1,100,66,15,56,23.6,0.666,26,0
+1,87,78,27,32,34.6,0.101,22,0
+0,101,76,0,0,35.7,0.198,26,0
+3,162,52,38,0,37.2,0.652,24,1
+4,197,70,39,744,36.7,2.329,31,0
+0,117,80,31,53,45.2,0.089,24,0
+4,142,86,0,0,44,0.645,22,1
+6,134,80,37,370,46.2,0.238,46,1
+1,79,80,25,37,25.4,0.583,22,0
+4,122,68,0,0,35,0.394,29,0
+3,74,68,28,45,29.7,0.293,23,0
+4,171,72,0,0,43.6,0.479,26,1
+7,181,84,21,192,35.9,0.586,51,1
+0,179,90,27,0,44.1,0.686,23,1
+9,164,84,21,0,30.8,0.831,32,1
+0,104,76,0,0,18.4,0.582,27,0
+1,91,64,24,0,29.2,0.192,21,0
+4,91,70,32,88,33.1,0.446,22,0
+3,139,54,0,0,25.6,0.402,22,1
+6,119,50,22,176,27.1,1.318,33,1
+2,146,76,35,194,38.2,0.329,29,0
+9,184,85,15,0,30,1.213,49,1
+10,122,68,0,0,31.2,0.258,41,0
+0,165,90,33,680,52.3,0.427,23,0
+9,124,70,33,402,35.4,0.282,34,0
+1,111,86,19,0,30.1,0.143,23,0
+9,106,52,0,0,31.2,0.38,42,0
+2,129,84,0,0,28,0.284,27,0
+2,90,80,14,55,24.4,0.249,24,0
+0,86,68,32,0,35.8,0.238,25,0
+12,92,62,7,258,27.6,0.926,44,1
+1,113,64,35,0,33.6,0.543,21,1
+3,111,56,39,0,30.1,0.557,30,0
+2,114,68,22,0,28.7,0.092,25,0
+1,193,50,16,375,25.9,0.655,24,0
+11,155,76,28,150,33.3,1.353,51,1
+3,191,68,15,130,30.9,0.299,34,0
+3,141,0,0,0,30,0.761,27,1
+4,95,70,32,0,32.1,0.612,24,0
+3,142,80,15,0,32.4,0.2,63,0
+4,123,62,0,0,32,0.226,35,1
+5,96,74,18,67,33.6,0.997,43,0
+0,138,0,0,0,36.3,0.933,25,1
+2,128,64,42,0,40,1.101,24,0
+0,102,52,0,0,25.1,0.078,21,0
+2,146,0,0,0,27.5,0.24,28,1
+10,101,86,37,0,45.6,1.136,38,1
+2,108,62,32,56,25.2,0.128,21,0
+3,122,78,0,0,23,0.254,40,0
+1,71,78,50,45,33.2,0.422,21,0
+13,106,70,0,0,34.2,0.251,52,0
+2,100,70,52,57,40.5,0.677,25,0
+7,106,60,24,0,26.5,0.296,29,1
+0,104,64,23,116,27.8,0.454,23,0
+5,114,74,0,0,24.9,0.744,57,0
+2,108,62,10,278,25.3,0.881,22,0
+0,146,70,0,0,37.9,0.334,28,1
+10,129,76,28,122,35.9,0.28,39,0
+7,133,88,15,155,32.4,0.262,37,0
+7,161,86,0,0,30.4,0.165,47,1
+2,108,80,0,0,27,0.259,52,1
+7,136,74,26,135,26,0.647,51,0
+5,155,84,44,545,38.7,0.619,34,0
+1,119,86,39,220,45.6,0.808,29,1
+4,96,56,17,49,20.8,0.34,26,0
+5,108,72,43,75,36.1,0.263,33,0
+0,78,88,29,40,36.9,0.434,21,0
+0,107,62,30,74,36.6,0.757,25,1
+2,128,78,37,182,43.3,1.224,31,1
+1,128,48,45,194,40.5,0.613,24,1
+0,161,50,0,0,21.9,0.254,65,0
+6,151,62,31,120,35.5,0.692,28,0
+2,146,70,38,360,28,0.337,29,1
+0,126,84,29,215,30.7,0.52,24,0
+14,100,78,25,184,36.6,0.412,46,1
+8,112,72,0,0,23.6,0.84,58,0
+0,167,0,0,0,32.3,0.839,30,1
+2,144,58,33,135,31.6,0.422,25,1
+5,77,82,41,42,35.8,0.156,35,0
+5,115,98,0,0,52.9,0.209,28,1
+3,150,76,0,0,21,0.207,37,0
+2,120,76,37,105,39.7,0.215,29,0
+10,161,68,23,132,25.5,0.326,47,1
+0,137,68,14,148,24.8,0.143,21,0
+0,128,68,19,180,30.5,1.391,25,1
+2,124,68,28,205,32.9,0.875,30,1
+6,80,66,30,0,26.2,0.313,41,0
+0,106,70,37,148,39.4,0.605,22,0
+2,155,74,17,96,26.6,0.433,27,1
+3,113,50,10,85,29.5,0.626,25,0
+7,109,80,31,0,35.9,1.127,43,1
+2,112,68,22,94,34.1,0.315,26,0
+3,99,80,11,64,19.3,0.284,30,0
+3,182,74,0,0,30.5,0.345,29,1
+3,115,66,39,140,38.1,0.15,28,0
+6,194,78,0,0,23.5,0.129,59,1
+4,129,60,12,231,27.5,0.527,31,0
+3,112,74,30,0,31.6,0.197,25,1
+0,124,70,20,0,27.4,0.254,36,1
+13,152,90,33,29,26.8,0.731,43,1
+2,112,75,32,0,35.7,0.148,21,0
+1,157,72,21,168,25.6,0.123,24,0
+1,122,64,32,156,35.1,0.692,30,1
+10,179,70,0,0,35.1,0.2,37,0
+2,102,86,36,120,45.5,0.127,23,1
+6,105,70,32,68,30.8,0.122,37,0
+8,118,72,19,0,23.1,1.476,46,0
+2,87,58,16,52,32.7,0.166,25,0
+1,180,0,0,0,43.3,0.282,41,1
+12,106,80,0,0,23.6,0.137,44,0
+1,95,60,18,58,23.9,0.26,22,0
+0,165,76,43,255,47.9,0.259,26,0
+0,117,0,0,0,33.8,0.932,44,0
+5,115,76,0,0,31.2,0.343,44,1
+9,152,78,34,171,34.2,0.893,33,1
+7,178,84,0,0,39.9,0.331,41,1
+1,130,70,13,105,25.9,0.472,22,0
+1,95,74,21,73,25.9,0.673,36,0
+1,0,68,35,0,32,0.389,22,0
+5,122,86,0,0,34.7,0.29,33,0
+8,95,72,0,0,36.8,0.485,57,0
+8,126,88,36,108,38.5,0.349,49,0
+1,139,46,19,83,28.7,0.654,22,0
+3,116,0,0,0,23.5,0.187,23,0
+3,99,62,19,74,21.8,0.279,26,0
+5,0,80,32,0,41,0.346,37,1
+4,92,80,0,0,42.2,0.237,29,0
+4,137,84,0,0,31.2,0.252,30,0
+3,61,82,28,0,34.4,0.243,46,0
+1,90,62,12,43,27.2,0.58,24,0
+3,90,78,0,0,42.7,0.559,21,0
+9,165,88,0,0,30.4,0.302,49,1
+1,125,50,40,167,33.3,0.962,28,1
+13,129,0,30,0,39.9,0.569,44,1
+12,88,74,40,54,35.3,0.378,48,0
+1,196,76,36,249,36.5,0.875,29,1
+5,189,64,33,325,31.2,0.583,29,1
+5,158,70,0,0,29.8,0.207,63,0
+5,103,108,37,0,39.2,0.305,65,0
+4,146,78,0,0,38.5,0.52,67,1
+4,147,74,25,293,34.9,0.385,30,0
+5,99,54,28,83,34,0.499,30,0
+6,124,72,0,0,27.6,0.368,29,1
+0,101,64,17,0,21,0.252,21,0
+3,81,86,16,66,27.5,0.306,22,0
+1,133,102,28,140,32.8,0.234,45,1
+3,173,82,48,465,38.4,2.137,25,1
+0,118,64,23,89,0,1.731,21,0
+0,84,64,22,66,35.8,0.545,21,0
+2,105,58,40,94,34.9,0.225,25,0
+2,122,52,43,158,36.2,0.816,28,0
+12,140,82,43,325,39.2,0.528,58,1
+0,98,82,15,84,25.2,0.299,22,0
+1,87,60,37,75,37.2,0.509,22,0
+4,156,75,0,0,48.3,0.238,32,1
+0,93,100,39,72,43.4,1.021,35,0
+1,107,72,30,82,30.8,0.821,24,0
+0,105,68,22,0,20,0.236,22,0
+1,109,60,8,182,25.4,0.947,21,0
+1,90,62,18,59,25.1,1.268,25,0
+1,125,70,24,110,24.3,0.221,25,0
+1,119,54,13,50,22.3,0.205,24,0
+5,116,74,29,0,32.3,0.66,35,1
+8,105,100,36,0,43.3,0.239,45,1
+5,144,82,26,285,32,0.452,58,1
+3,100,68,23,81,31.6,0.949,28,0
+1,100,66,29,196,32,0.444,42,0
+5,166,76,0,0,45.7,0.34,27,1
+1,131,64,14,415,23.7,0.389,21,0
+4,116,72,12,87,22.1,0.463,37,0
+4,158,78,0,0,32.9,0.803,31,1
+2,127,58,24,275,27.7,1.6,25,0
+3,96,56,34,115,24.7,0.944,39,0
+0,131,66,40,0,34.3,0.196,22,1
+3,82,70,0,0,21.1,0.389,25,0
+3,193,70,31,0,34.9,0.241,25,1
+4,95,64,0,0,32,0.161,31,1
+6,137,61,0,0,24.2,0.151,55,0
+5,136,84,41,88,35,0.286,35,1
+9,72,78,25,0,31.6,0.28,38,0
+5,168,64,0,0,32.9,0.135,41,1
+2,123,48,32,165,42.1,0.52,26,0
+4,115,72,0,0,28.9,0.376,46,1
+0,101,62,0,0,21.9,0.336,25,0
+8,197,74,0,0,25.9,1.191,39,1
+1,172,68,49,579,42.4,0.702,28,1
+6,102,90,39,0,35.7,0.674,28,0
+1,112,72,30,176,34.4,0.528,25,0
+1,143,84,23,310,42.4,1.076,22,0
+1,143,74,22,61,26.2,0.256,21,0
+0,138,60,35,167,34.6,0.534,21,1
+3,173,84,33,474,35.7,0.258,22,1
+1,97,68,21,0,27.2,1.095,22,0
+4,144,82,32,0,38.5,0.554,37,1
+1,83,68,0,0,18.2,0.624,27,0
+3,129,64,29,115,26.4,0.219,28,1
+1,119,88,41,170,45.3,0.507,26,0
+2,94,68,18,76,26,0.561,21,0
+0,102,64,46,78,40.6,0.496,21,0
+2,115,64,22,0,30.8,0.421,21,0
+8,151,78,32,210,42.9,0.516,36,1
+4,184,78,39,277,37,0.264,31,1
+0,94,0,0,0,0,0.256,25,0
+1,181,64,30,180,34.1,0.328,38,1
+0,135,94,46,145,40.6,0.284,26,0
+1,95,82,25,180,35,0.233,43,1
+2,99,0,0,0,22.2,0.108,23,0
+3,89,74,16,85,30.4,0.551,38,0
+1,80,74,11,60,30,0.527,22,0
+2,139,75,0,0,25.6,0.167,29,0
+1,90,68,8,0,24.5,1.138,36,0
+0,141,0,0,0,42.4,0.205,29,1
+12,140,85,33,0,37.4,0.244,41,0
+5,147,75,0,0,29.9,0.434,28,0
+1,97,70,15,0,18.2,0.147,21,0
+6,107,88,0,0,36.8,0.727,31,0
+0,189,104,25,0,34.3,0.435,41,1
+2,83,66,23,50,32.2,0.497,22,0
+4,117,64,27,120,33.2,0.23,24,0
+8,108,70,0,0,30.5,0.955,33,1
+4,117,62,12,0,29.7,0.38,30,1
+0,180,78,63,14,59.4,2.42,25,1
+1,100,72,12,70,25.3,0.658,28,0
+0,95,80,45,92,36.5,0.33,26,0
+0,104,64,37,64,33.6,0.51,22,1
+0,120,74,18,63,30.5,0.285,26,0
+1,82,64,13,95,21.2,0.415,23,0
+2,134,70,0,0,28.9,0.542,23,1
+0,91,68,32,210,39.9,0.381,25,0
+2,119,0,0,0,19.6,0.832,72,0
+2,100,54,28,105,37.8,0.498,24,0
+14,175,62,30,0,33.6,0.212,38,1
+1,135,54,0,0,26.7,0.687,62,0
+5,86,68,28,71,30.2,0.364,24,0
+10,148,84,48,237,37.6,1.001,51,1
+9,134,74,33,60,25.9,0.46,81,0
+9,120,72,22,56,20.8,0.733,48,0
+1,71,62,0,0,21.8,0.416,26,0
+8,74,70,40,49,35.3,0.705,39,0
+5,88,78,30,0,27.6,0.258,37,0
+10,115,98,0,0,24,1.022,34,0
+0,124,56,13,105,21.8,0.452,21,0
+0,74,52,10,36,27.8,0.269,22,0
+0,97,64,36,100,36.8,0.6,25,0
+8,120,0,0,0,30,0.183,38,1
+6,154,78,41,140,46.1,0.571,27,0
+1,144,82,40,0,41.3,0.607,28,0
+0,137,70,38,0,33.2,0.17,22,0
+0,119,66,27,0,38.8,0.259,22,0
+7,136,90,0,0,29.9,0.21,50,0
+4,114,64,0,0,28.9,0.126,24,0
+0,137,84,27,0,27.3,0.231,59,0
+2,105,80,45,191,33.7,0.711,29,1
+7,114,76,17,110,23.8,0.466,31,0
+8,126,74,38,75,25.9,0.162,39,0
+4,132,86,31,0,28,0.419,63,0
+3,158,70,30,328,35.5,0.344,35,1
+0,123,88,37,0,35.2,0.197,29,0
+4,85,58,22,49,27.8,0.306,28,0
+0,84,82,31,125,38.2,0.233,23,0
+0,145,0,0,0,44.2,0.63,31,1
+0,135,68,42,250,42.3,0.365,24,1
+1,139,62,41,480,40.7,0.536,21,0
+0,173,78,32,265,46.5,1.159,58,0
+4,99,72,17,0,25.6,0.294,28,0
+8,194,80,0,0,26.1,0.551,67,0
+2,83,65,28,66,36.8,0.629,24,0
+2,89,90,30,0,33.5,0.292,42,0
+4,99,68,38,0,32.8,0.145,33,0
+4,125,70,18,122,28.9,1.144,45,1
+3,80,0,0,0,0,0.174,22,0
+6,166,74,0,0,26.6,0.304,66,0
+5,110,68,0,0,26,0.292,30,0
+2,81,72,15,76,30.1,0.547,25,0
+7,195,70,33,145,25.1,0.163,55,1
+6,154,74,32,193,29.3,0.839,39,0
+2,117,90,19,71,25.2,0.313,21,0
+3,84,72,32,0,37.2,0.267,28,0
+6,0,68,41,0,39,0.727,41,1
+7,94,64,25,79,33.3,0.738,41,0
+3,96,78,39,0,37.3,0.238,40,0
+10,75,82,0,0,33.3,0.263,38,0
+0,180,90,26,90,36.5,0.314,35,1
+1,130,60,23,170,28.6,0.692,21,0
+2,84,50,23,76,30.4,0.968,21,0
+8,120,78,0,0,25,0.409,64,0
+12,84,72,31,0,29.7,0.297,46,1
+0,139,62,17,210,22.1,0.207,21,0
+9,91,68,0,0,24.2,0.2,58,0
+2,91,62,0,0,27.3,0.525,22,0
+3,99,54,19,86,25.6,0.154,24,0
+3,163,70,18,105,31.6,0.268,28,1
+9,145,88,34,165,30.3,0.771,53,1
+7,125,86,0,0,37.6,0.304,51,0
+13,76,60,0,0,32.8,0.18,41,0
+6,129,90,7,326,19.6,0.582,60,0
+2,68,70,32,66,25,0.187,25,0
+3,124,80,33,130,33.2,0.305,26,0
+6,114,0,0,0,0,0.189,26,0
+9,130,70,0,0,34.2,0.652,45,1
+3,125,58,0,0,31.6,0.151,24,0
+3,87,60,18,0,21.8,0.444,21,0
+1,97,64,19,82,18.2,0.299,21,0
+3,116,74,15,105,26.3,0.107,24,0
+0,117,66,31,188,30.8,0.493,22,0
+0,111,65,0,0,24.6,0.66,31,0
+2,122,60,18,106,29.8,0.717,22,0
+0,107,76,0,0,45.3,0.686,24,0
+1,86,66,52,65,41.3,0.917,29,0
+6,91,0,0,0,29.8,0.501,31,0
+1,77,56,30,56,33.3,1.251,24,0
+4,132,0,0,0,32.9,0.302,23,1
+0,105,90,0,0,29.6,0.197,46,0
+0,57,60,0,0,21.7,0.735,67,0
+0,127,80,37,210,36.3,0.804,23,0
+3,129,92,49,155,36.4,0.968,32,1
+8,100,74,40,215,39.4,0.661,43,1
+3,128,72,25,190,32.4,0.549,27,1
+10,90,85,32,0,34.9,0.825,56,1
+4,84,90,23,56,39.5,0.159,25,0
+1,88,78,29,76,32,0.365,29,0
+8,186,90,35,225,34.5,0.423,37,1
+5,187,76,27,207,43.6,1.034,53,1
+4,131,68,21,166,33.1,0.16,28,0
+1,164,82,43,67,32.8,0.341,50,0
+4,189,110,31,0,28.5,0.68,37,0
+1,116,70,28,0,27.4,0.204,21,0
+3,84,68,30,106,31.9,0.591,25,0
+6,114,88,0,0,27.8,0.247,66,0
+1,88,62,24,44,29.9,0.422,23,0
+1,84,64,23,115,36.9,0.471,28,0
+7,124,70,33,215,25.5,0.161,37,0
+1,97,70,40,0,38.1,0.218,30,0
+8,110,76,0,0,27.8,0.237,58,0
+11,103,68,40,0,46.2,0.126,42,0
+11,85,74,0,0,30.1,0.3,35,0
+6,125,76,0,0,33.8,0.121,54,1
+0,198,66,32,274,41.3,0.502,28,1
+1,87,68,34,77,37.6,0.401,24,0
+6,99,60,19,54,26.9,0.497,32,0
+0,91,80,0,0,32.4,0.601,27,0
+2,95,54,14,88,26.1,0.748,22,0
+1,99,72,30,18,38.6,0.412,21,0
+6,92,62,32,126,32,0.085,46,0
+4,154,72,29,126,31.3,0.338,37,0
+0,121,66,30,165,34.3,0.203,33,1
+3,78,70,0,0,32.5,0.27,39,0
+2,130,96,0,0,22.6,0.268,21,0
+3,111,58,31,44,29.5,0.43,22,0
+2,98,60,17,120,34.7,0.198,22,0
+1,143,86,30,330,30.1,0.892,23,0
+1,119,44,47,63,35.5,0.28,25,0
+6,108,44,20,130,24,0.813,35,0
+2,118,80,0,0,42.9,0.693,21,1
+10,133,68,0,0,27,0.245,36,0
+2,197,70,99,0,34.7,0.575,62,1
+0,151,90,46,0,42.1,0.371,21,1
+6,109,60,27,0,25,0.206,27,0
+12,121,78,17,0,26.5,0.259,62,0
+8,100,76,0,0,38.7,0.19,42,0
+8,124,76,24,600,28.7,0.687,52,1
+1,93,56,11,0,22.5,0.417,22,0
+8,143,66,0,0,34.9,0.129,41,1
+6,103,66,0,0,24.3,0.249,29,0
+3,176,86,27,156,33.3,1.154,52,1
+0,73,0,0,0,21.1,0.342,25,0
+11,111,84,40,0,46.8,0.925,45,1
+2,112,78,50,140,39.4,0.175,24,0
+3,132,80,0,0,34.4,0.402,44,1
+2,82,52,22,115,28.5,1.699,25,0
+6,123,72,45,230,33.6,0.733,34,0
+0,188,82,14,185,32,0.682,22,1
+0,67,76,0,0,45.3,0.194,46,0
+1,89,24,19,25,27.8,0.559,21,0
+1,173,74,0,0,36.8,0.088,38,1
+1,109,38,18,120,23.1,0.407,26,0
+1,108,88,19,0,27.1,0.4,24,0
+6,96,0,0,0,23.7,0.19,28,0
+1,124,74,36,0,27.8,0.1,30,0
+7,150,78,29,126,35.2,0.692,54,1
+4,183,0,0,0,28.4,0.212,36,1
+1,124,60,32,0,35.8,0.514,21,0
+1,181,78,42,293,40,1.258,22,1
+1,92,62,25,41,19.5,0.482,25,0
+0,152,82,39,272,41.5,0.27,27,0
+1,111,62,13,182,24,0.138,23,0
+3,106,54,21,158,30.9,0.292,24,0
+3,174,58,22,194,32.9,0.593,36,1
+7,168,88,42,321,38.2,0.787,40,1
+6,105,80,28,0,32.5,0.878,26,0
+11,138,74,26,144,36.1,0.557,50,1
+3,106,72,0,0,25.8,0.207,27,0
+6,117,96,0,0,28.7,0.157,30,0
+2,68,62,13,15,20.1,0.257,23,0
+9,112,82,24,0,28.2,1.282,50,1
+0,119,0,0,0,32.4,0.141,24,1
+2,112,86,42,160,38.4,0.246,28,0
+2,92,76,20,0,24.2,1.698,28,0
+6,183,94,0,0,40.8,1.461,45,0
+0,94,70,27,115,43.5,0.347,21,0
+2,108,64,0,0,30.8,0.158,21,0
+4,90,88,47,54,37.7,0.362,29,0
+0,125,68,0,0,24.7,0.206,21,0
+0,132,78,0,0,32.4,0.393,21,0
+5,128,80,0,0,34.6,0.144,45,0
+4,94,65,22,0,24.7,0.148,21,0
+7,114,64,0,0,27.4,0.732,34,1
+0,102,78,40,90,34.5,0.238,24,0
+2,111,60,0,0,26.2,0.343,23,0
+1,128,82,17,183,27.5,0.115,22,0
+10,92,62,0,0,25.9,0.167,31,0
+13,104,72,0,0,31.2,0.465,38,1
+5,104,74,0,0,28.8,0.153,48,0
+2,94,76,18,66,31.6,0.649,23,0
+7,97,76,32,91,40.9,0.871,32,1
+1,100,74,12,46,19.5,0.149,28,0
+0,102,86,17,105,29.3,0.695,27,0
+4,128,70,0,0,34.3,0.303,24,0
+6,147,80,0,0,29.5,0.178,50,1
+4,90,0,0,0,28,0.61,31,0
+3,103,72,30,152,27.6,0.73,27,0
+2,157,74,35,440,39.4,0.134,30,0
+1,167,74,17,144,23.4,0.447,33,1
+0,179,50,36,159,37.8,0.455,22,1
+11,136,84,35,130,28.3,0.26,42,1
+0,107,60,25,0,26.4,0.133,23,0
+1,91,54,25,100,25.2,0.234,23,0
+1,117,60,23,106,33.8,0.466,27,0
+5,123,74,40,77,34.1,0.269,28,0
+2,120,54,0,0,26.8,0.455,27,0
+1,106,70,28,135,34.2,0.142,22,0
+2,155,52,27,540,38.7,0.24,25,1
+2,101,58,35,90,21.8,0.155,22,0
+1,120,80,48,200,38.9,1.162,41,0
+11,127,106,0,0,39,0.19,51,0
+3,80,82,31,70,34.2,1.292,27,1
+10,162,84,0,0,27.7,0.182,54,0
+1,199,76,43,0,42.9,1.394,22,1
+8,167,106,46,231,37.6,0.165,43,1
+9,145,80,46,130,37.9,0.637,40,1
+6,115,60,39,0,33.7,0.245,40,1
+1,112,80,45,132,34.8,0.217,24,0
+4,145,82,18,0,32.5,0.235,70,1
+10,111,70,27,0,27.5,0.141,40,1
+6,98,58,33,190,34,0.43,43,0
+9,154,78,30,100,30.9,0.164,45,0
+6,165,68,26,168,33.6,0.631,49,0
+1,99,58,10,0,25.4,0.551,21,0
+10,68,106,23,49,35.5,0.285,47,0
+3,123,100,35,240,57.3,0.88,22,0
+8,91,82,0,0,35.6,0.587,68,0
+6,195,70,0,0,30.9,0.328,31,1
+9,156,86,0,0,24.8,0.23,53,1
+0,93,60,0,0,35.3,0.263,25,0
+3,121,52,0,0,36,0.127,25,1
+2,101,58,17,265,24.2,0.614,23,0
+2,56,56,28,45,24.2,0.332,22,0
+0,162,76,36,0,49.6,0.364,26,1
+0,95,64,39,105,44.6,0.366,22,0
+4,125,80,0,0,32.3,0.536,27,1
+5,136,82,0,0,0,0.64,69,0
+2,129,74,26,205,33.2,0.591,25,0
+3,130,64,0,0,23.1,0.314,22,0
+1,107,50,19,0,28.3,0.181,29,0
+1,140,74,26,180,24.1,0.828,23,0
+1,144,82,46,180,46.1,0.335,46,1
+8,107,80,0,0,24.6,0.856,34,0
+13,158,114,0,0,42.3,0.257,44,1
+2,121,70,32,95,39.1,0.886,23,0
+7,129,68,49,125,38.5,0.439,43,1
+2,90,60,0,0,23.5,0.191,25,0
+7,142,90,24,480,30.4,0.128,43,1
+3,169,74,19,125,29.9,0.268,31,1
+0,99,0,0,0,25,0.253,22,0
+4,127,88,11,155,34.5,0.598,28,0
+4,118,70,0,0,44.5,0.904,26,0
+2,122,76,27,200,35.9,0.483,26,0
+6,125,78,31,0,27.6,0.565,49,1
+1,168,88,29,0,35,0.905,52,1
+2,129,0,0,0,38.5,0.304,41,0
+4,110,76,20,100,28.4,0.118,27,0
+6,80,80,36,0,39.8,0.177,28,0
+10,115,0,0,0,0,0.261,30,1
+2,127,46,21,335,34.4,0.176,22,0
+9,164,78,0,0,32.8,0.148,45,1
+2,93,64,32,160,38,0.674,23,1
+3,158,64,13,387,31.2,0.295,24,0
+5,126,78,27,22,29.6,0.439,40,0
+10,129,62,36,0,41.2,0.441,38,1
+0,134,58,20,291,26.4,0.352,21,0
+3,102,74,0,0,29.5,0.121,32,0
+7,187,50,33,392,33.9,0.826,34,1
+3,173,78,39,185,33.8,0.97,31,1
+10,94,72,18,0,23.1,0.595,56,0
+1,108,60,46,178,35.5,0.415,24,0
+5,97,76,27,0,35.6,0.378,52,1
+4,83,86,19,0,29.3,0.317,34,0
+1,114,66,36,200,38.1,0.289,21,0
+1,149,68,29,127,29.3,0.349,42,1
+5,117,86,30,105,39.1,0.251,42,0
+1,111,94,0,0,32.8,0.265,45,0
+4,112,78,40,0,39.4,0.236,38,0
+1,116,78,29,180,36.1,0.496,25,0
+0,141,84,26,0,32.4,0.433,22,0
+2,175,88,0,0,22.9,0.326,22,0
+2,92,52,0,0,30.1,0.141,22,0
+3,130,78,23,79,28.4,0.323,34,1
+8,120,86,0,0,28.4,0.259,22,1
+2,174,88,37,120,44.5,0.646,24,1
+2,106,56,27,165,29,0.426,22,0
+2,105,75,0,0,23.3,0.56,53,0
+4,95,60,32,0,35.4,0.284,28,0
+0,126,86,27,120,27.4,0.515,21,0
+8,65,72,23,0,32,0.6,42,0
+2,99,60,17,160,36.6,0.453,21,0
+1,102,74,0,0,39.5,0.293,42,1
+11,120,80,37,150,42.3,0.785,48,1
+3,102,44,20,94,30.8,0.4,26,0
+1,109,58,18,116,28.5,0.219,22,0
+9,140,94,0,0,32.7,0.734,45,1
+13,153,88,37,140,40.6,1.174,39,0
+12,100,84,33,105,30,0.488,46,0
+1,147,94,41,0,49.3,0.358,27,1
+1,81,74,41,57,46.3,1.096,32,0
+3,187,70,22,200,36.4,0.408,36,1
+6,162,62,0,0,24.3,0.178,50,1
+4,136,70,0,0,31.2,1.182,22,1
+1,121,78,39,74,39,0.261,28,0
+3,108,62,24,0,26,0.223,25,0
+0,181,88,44,510,43.3,0.222,26,1
+8,154,78,32,0,32.4,0.443,45,1
+1,128,88,39,110,36.5,1.057,37,1
+7,137,90,41,0,32,0.391,39,0
+0,123,72,0,0,36.3,0.258,52,1
+1,106,76,0,0,37.5,0.197,26,0
+6,190,92,0,0,35.5,0.278,66,1
+2,88,58,26,16,28.4,0.766,22,0
+9,170,74,31,0,44,0.403,43,1
+9,89,62,0,0,22.5,0.142,33,0
+10,101,76,48,180,32.9,0.171,63,0
+2,122,70,27,0,36.8,0.34,27,0
+5,121,72,23,112,26.2,0.245,30,0
+1,126,60,0,0,30.1,0.349,47,1
+1,93,70,31,0,30.4,0.315,23,0
\ No newline at end of file
diff --git a/src/test/resources/neuralnets_classification_label.txt b/src/test/resources/neuralnets_classification_label.txt
new file mode 100644
index 0000000..e1b6789
--- /dev/null
+++ b/src/test/resources/neuralnets_classification_label.txt
Binary files differ
diff --git a/src/test/resources/neuralnets_classification_test.txt b/src/test/resources/neuralnets_classification_test.txt
new file mode 100644
index 0000000..b19107d
--- /dev/null
+++ b/src/test/resources/neuralnets_classification_test.txt
Binary files differ
diff --git a/src/test/resources/neuralnets_classification_training.txt b/src/test/resources/neuralnets_classification_training.txt
new file mode 100644
index 0000000..405fb69
--- /dev/null
+++ b/src/test/resources/neuralnets_classification_training.txt
@@ -0,0 +1,668 @@
+0.35294117647058826,0.7437185929648241,0.5901639344262295,0.35353535353535354,0.0,0.5007451564828614,0.23441502988898377,0.48333333333333334,1.0
+0.058823529411764705,0.4271356783919598,0.5409836065573771,0.29292929292929293,0.0,0.3964232488822653,0.11656703672075147,0.16666666666666666,0.0
+0.47058823529411764,0.9195979899497487,0.5245901639344263,0.0,0.0,0.34724292101341286,0.2536293766011956,0.18333333333333332,1.0
+0.058823529411764705,0.4472361809045226,0.5409836065573771,0.23232323232323232,0.1111111111111111,0.41877794336810736,0.038001707941929974,0.0,0.0
+0.0,0.6884422110552764,0.32786885245901637,0.35353535353535354,0.19858156028368795,0.6423248882265277,0.9436379163108454,0.2,1.0
+0.29411764705882354,0.5829145728643216,0.6065573770491803,0.0,0.0,0.3815201192250373,0.052519214346712216,0.15,0.0
+0.17647058823529413,0.39195979899497485,0.4098360655737705,0.32323232323232326,0.10401891252955082,0.4619970193740686,0.07258753202391117,0.08333333333333333,1.0
+0.5882352941176471,0.5778894472361809,0.0,0.0,0.0,0.526080476900149,0.023911187019641334,0.13333333333333333,0.0
+0.11764705882352941,0.9899497487437185,0.5737704918032787,0.45454545454545453,0.6418439716312057,0.4545454545454546,0.034158838599487616,0.5333333333333333,1.0
+0.47058823529411764,0.628140703517588,0.7868852459016393,0.0,0.0,0.0,0.06575576430401367,0.55,1.0
+0.23529411764705882,0.5527638190954773,0.7540983606557377,0.0,0.0,0.5603576751117736,0.04824935952177626,0.15,0.0
+0.5882352941176471,0.8442211055276382,0.6065573770491803,0.0,0.0,0.5663189269746647,0.1959863364645602,0.21666666666666667,1.0
+0.5882352941176471,0.6984924623115578,0.6557377049180327,0.0,0.0,0.40387481371087935,0.5819812126387702,0.6,0.0
+0.058823529411764705,0.949748743718593,0.4918032786885246,0.23232323232323232,1.0,0.4485842026825634,0.13663535439795046,0.6333333333333333,1.0
+0.29411764705882354,0.8341708542713567,0.5901639344262295,0.1919191919191919,0.20685579196217493,0.3845007451564829,0.21733561058923997,0.5,1.0
+0.4117647058823529,0.5025125628140703,0.0,0.0,0.0,0.44709388971684055,0.17335610589239964,0.18333333333333332,1.0
+0.0,0.592964824120603,0.6885245901639344,0.47474747474747475,0.2718676122931442,0.6825633383010432,0.20196413321947054,0.16666666666666666,1.0
+0.4117647058823529,0.5376884422110553,0.6065573770491803,0.0,0.0,0.4411326378539494,0.07514944491887275,0.16666666666666666,1.0
+0.058823529411764705,0.5175879396984925,0.2459016393442623,0.3838383838383838,0.09810874704491726,0.6453055141579732,0.04483347566182749,0.2,0.0
+0.058823529411764705,0.5778894472361809,0.5737704918032787,0.30303030303030304,0.11347517730496454,0.5156482861400895,0.19257045260461145,0.18333333333333332,1.0
+0.17647058823529413,0.6331658291457286,0.7213114754098361,0.41414141414141414,0.2777777777777778,0.5856929955290611,0.2672929120409906,0.1,0.0
+0.47058823529411764,0.49748743718592964,0.6885245901639344,0.0,0.0,0.5275707898658718,0.13236549957301452,0.48333333333333334,0.0
+0.4117647058823529,0.9849246231155779,0.7377049180327869,0.0,0.0,0.5931445603576752,0.159265584970111,0.3333333333333333,1.0
+0.5294117647058824,0.5979899497487438,0.6557377049180327,0.35353535353535354,0.0,0.43219076005961254,0.07899231426131512,0.13333333333333333,1.0
+0.6470588235294118,0.7185929648241206,0.7704918032786885,0.3333333333333333,0.17257683215130024,0.5454545454545455,0.07514944491887275,0.5,1.0
+0.5882352941176471,0.628140703517588,0.5737704918032787,0.26262626262626265,0.1359338061465721,0.4634873323397914,0.05422715627668659,0.3333333333333333,1.0
+0.4117647058823529,0.7386934673366834,0.6229508196721312,0.0,0.0,0.587183308494784,0.07643040136635354,0.36666666666666664,1.0
+0.058823529411764705,0.48743718592964824,0.5409836065573771,0.15151515151515152,0.16548463356973994,0.34575260804769004,0.17463706233988044,0.016666666666666666,0.0
+0.7647058823529411,0.7286432160804021,0.6721311475409836,0.1919191919191919,0.13002364066193853,0.330849478390462,0.07130657557643039,0.6,0.0
+0.29411764705882354,0.5879396984924623,0.7540983606557377,0.0,0.0,0.5081967213114754,0.11058923996584116,0.2833333333333333,0.0
+0.29411764705882354,0.5477386934673367,0.6147540983606558,0.26262626262626265,0.0,0.5365126676602087,0.19982920580700256,0.65,0.0
+0.17647058823529413,0.7939698492462312,0.6229508196721312,0.36363636363636365,0.2895981087470449,0.4709388971684054,0.3300597779675491,0.11666666666666667,1.0
+0.17647058823529413,0.44221105527638194,0.47540983606557374,0.1111111111111111,0.06382978723404255,0.3695976154992549,0.0807002561912895,0.016666666666666666,0.0
+0.35294117647058826,0.4623115577889447,0.7540983606557377,0.0,0.0,0.2965722801788376,0.04696840307429547,0.11666666666666667,0.0
+0.5882352941176471,0.6130653266331658,0.639344262295082,0.31313131313131315,0.0,0.41132637853949333,0.1853116994022203,0.4,0.0
+0.23529411764705882,0.5175879396984925,0.4918032786885246,0.3333333333333333,0.22695035460992907,0.35767511177347244,0.37916310845431256,0.2,0.0
+0.6470588235294118,0.6934673366834171,0.6229508196721312,0.0,0.0,0.4947839046199703,0.14602903501280956,0.23333333333333334,0.0
+0.5294117647058824,0.5125628140703518,0.6229508196721312,0.37373737373737376,0.0,0.4903129657228018,0.2506404782237404,0.4166666666666667,1.0
+0.11764705882352941,0.45226130653266333,0.5573770491803278,0.42424242424242425,0.0,0.5692995529061103,0.18146883005977796,0.1,1.0
+0.23529411764705882,0.5577889447236181,0.5901639344262295,0.47474747474747475,0.24468085106382978,0.5529061102831595,0.5602049530315968,0.5833333333333334,1.0
+0.17647058823529413,0.9045226130653267,0.5245901639344263,0.25252525252525254,0.08274231678486997,0.5067064083457526,0.08240819812126388,0.08333333333333333,0.0
+0.4117647058823529,0.6683417085427136,0.6885245901639344,0.0,0.0,0.5991058122205664,0.2638770281810418,0.26666666666666666,0.0
+0.4117647058823529,0.5326633165829145,0.7540983606557377,0.18181818181818182,0.0,0.338301043219076,0.06703672075149443,0.45,0.0
+0.5294117647058824,0.8592964824120602,0.9016393442622951,0.24242424242424243,0.28368794326241137,0.676602086438152,0.2745516652433817,0.55,1.0
+0.4117647058823529,0.7989949748743719,0.5245901639344263,0.0,0.0,0.40834575260804773,0.09222886421861655,0.31666666666666665,0.0
+0.0,0.9045226130653267,0.5409836065573771,0.3939393939393939,0.0,0.6259314456035768,0.7749786507258752,0.06666666666666667,1.0
+0.058823529411764705,0.7336683417085427,0.45901639344262296,0.0,0.0,0.4426229508196722,0.20751494449188723,0.13333333333333333,0.0
+0.11764705882352941,0.35678391959798994,0.5737704918032787,0.2727272727272727,0.0,0.41728763040238454,0.21690862510674636,0.016666666666666666,0.0
+0.4117647058823529,0.5175879396984925,0.5409836065573771,0.32323232323232326,0.0,0.5827123695976155,0.1135781383432963,0.16666666666666666,1.0
+0.4117647058823529,0.5276381909547738,0.0,0.0,0.0,0.0,0.0969257045260461,0.05,0.0
+0.058823529411764705,0.5175879396984925,0.6557377049180327,0.1111111111111111,0.09692671394799054,0.28912071535022354,0.1763450042698548,0.016666666666666666,0.0
+0.058823529411764705,0.507537688442211,0.4098360655737705,0.15151515151515152,0.0425531914893617,0.36065573770491804,0.19128949615713065,0.08333333333333333,0.0
+0.29411764705882354,0.44221105527638194,0.5409836065573771,0.21212121212121213,0.027186761229314422,0.36363636363636365,0.11272416737830913,0.15,0.0
+0.47058823529411764,0.8844221105527639,0.7377049180327869,0.3434343434343434,0.3546099290780142,0.5022354694485843,0.16609735269000853,0.6166666666666667,1.0
+0.4117647058823529,0.7537688442211056,0.5409836065573771,0.42424242424242425,0.40425531914893614,0.5171385991058123,0.27327070879590093,0.35,0.0
+0.058823529411764705,0.36683417085427134,0.4098360655737705,0.10101010101010101,0.0,0.34277198211624443,0.07258753202391117,0.0,0.0
+0.4117647058823529,0.9396984924623115,0.5573770491803278,0.3939393939393939,0.35933806146572106,0.5618479880774964,0.07514944491887275,0.3333333333333333,1.0
+0.0,0.5025125628140703,0.7213114754098361,0.6060606060606061,0.13002364066193853,0.6974664679582713,0.37745516652433814,0.16666666666666666,0.0
+0.0,0.7336683417085427,0.6721311475409836,0.0,0.0,0.6035767511177348,0.7271562766865926,0.38333333333333336,0.0
+0.0,0.5276381909547738,0.5245901639344263,0.41414141414141414,0.16784869976359337,0.6184798807749627,0.04056362083689154,0.016666666666666666,0.0
+0.11764705882352941,0.4221105527638191,0.0,0.0,0.0,0.0,0.0964987190435525,0.0,0.0
+0.47058823529411764,0.6683417085427136,0.5901639344262295,0.0,0.0,0.4903129657228018,0.08198121263877028,0.3,1.0
+0.29411764705882354,0.22110552763819097,0.5081967213114754,0.0,0.0,0.37257824143070045,0.21733561058923997,0.25,0.0
+0.11764705882352941,0.7085427135678392,0.47540983606557374,0.3434343434343434,0.15130023640661938,0.37853949329359166,0.26515798462852264,0.05,0.0
+0.4117647058823529,0.5728643216080402,0.5409836065573771,0.0,0.0,0.488822652757079,0.07685738684884713,0.35,1.0
+0.29411764705882354,0.49748743718592964,0.6065573770491803,0.2727272727272727,0.0,0.43219076005961254,0.0533731853116994,0.18333333333333332,0.0
+0.0,0.5477386934673367,0.7213114754098361,0.30303030303030304,0.0,0.4843517138599106,0.33176771989752346,0.2833333333333333,1.0
+0.11764705882352941,0.5477386934673367,0.7540983606557377,0.0,0.0,0.6363636363636365,0.32749786507258755,0.55,0.0
+0.058823529411764705,0.47738693467336685,0.5409836065573771,0.13131313131313133,0.04491725768321513,0.2921013412816692,0.10930828351836037,0.06666666666666667,0.0
+0.23529411764705882,0.7336683417085427,0.6967213114754098,0.2727272727272727,0.1182033096926714,0.4307004470938897,0.04739538855678907,0.1,0.0
+0.11764705882352941,0.5025125628140703,0.5409836065573771,0.20202020202020202,0.10638297872340426,0.4903129657228018,0.3368915456874466,0.11666666666666667,1.0
+0.29411764705882354,0.6984924623115578,0.5245901639344263,0.35353535353535354,0.16548463356973994,0.4262295081967214,0.14218616567036718,0.08333333333333333,0.0
+0.7647058823529411,0.6331658291457286,0.7377049180327869,0.0,0.0,0.646795827123696,0.21562766865926558,0.35,1.0
+0.23529411764705882,0.6482412060301508,0.7049180327868853,0.20202020202020202,0.3191489361702128,0.5230998509687035,0.06532877882152008,0.03333333333333333,0.0
+0.058823529411764705,0.3969849246231156,0.6147540983606558,0.30303030303030304,0.0,0.4769001490312966,0.13578138343296328,0.016666666666666666,0.0
+0.058823529411764705,0.0,0.39344262295081966,0.20202020202020202,0.0,0.3681073025335321,0.026473099914602907,0.016666666666666666,0.0
+0.4117647058823529,0.31155778894472363,0.639344262295082,0.0,0.0,0.48584202682563343,0.1336464560204953,0.3333333333333333,0.0
+0.29411764705882354,0.47738693467336685,0.5901639344262295,0.3333333333333333,0.0,0.5618479880774964,0.12467976088812979,0.1,0.0
+0.0,0.6582914572864321,0.0,0.0,0.0,0.6438152011922504,0.08198121263877028,0.08333333333333333,1.0
+0.11764705882352941,0.5628140703517588,0.5409836065573771,0.2222222222222222,0.0,0.37257824143070045,0.0977796754910333,0.05,0.0
+0.17647058823529413,0.5678391959798995,0.36065573770491804,0.13131313131313133,0.0,0.33383010432190763,0.026473099914602907,0.016666666666666666,0.0
+0.11764705882352941,0.37185929648241206,0.0,0.0,0.0,0.0,0.010247651579846282,0.016666666666666666,0.0
+0.4117647058823529,0.41708542713567837,0.639344262295082,0.26262626262626265,0.08392434988179669,0.436661698956781,0.29419299743808713,0.25,0.0
+0.0,0.507537688442211,0.5327868852459017,0.2828282828282828,0.0,0.3666169895678093,0.06789069171648163,0.016666666666666666,0.0
+0.29411764705882354,0.6884422110552764,0.8852459016393442,0.0,0.0,0.7272727272727273,0.0636208368915457,0.26666666666666666,1.0
+0.11764705882352941,0.5527638190954773,0.6065573770491803,0.29292929292929293,0.14775413711583923,0.4828614008941878,0.26473099914602904,0.1,0.0
+0.7647058823529411,0.5326633165829145,0.5901639344262295,0.5454545454545454,0.0,0.5454545454545455,0.042698548249359515,0.4,0.0
+0.11764705882352941,0.5025125628140703,0.5573770491803278,0.25252525252525254,0.08392434988179669,0.5737704918032788,0.10503842869342442,0.08333333333333333,0.0
+0.8823529411764706,0.6834170854271356,0.5737704918032787,0.32323232323232326,0.13002364066193853,0.5529061102831595,0.03202391118701964,0.36666666666666664,1.0
+0.058823529411764705,0.5376884422110553,0.5573770491803278,0.1919191919191919,0.0,0.3949329359165425,0.037147736976942784,0.05,0.0
+0.058823529411764705,0.4020100502512563,0.45081967213114754,0.0,0.0,0.28464977645305517,0.07685738684884713,0.0,0.0
+0.23529411764705882,0.6180904522613065,0.6557377049180327,0.15151515151515152,0.20803782505910165,0.4769001490312966,0.15584970111016225,0.21666666666666667,0.0
+0.4117647058823529,0.40703517587939697,0.639344262295082,0.40404040404040403,0.05673758865248227,0.6959761549925485,0.07813834329632792,0.35,0.0
+0.23529411764705882,0.6733668341708543,0.5901639344262295,0.0,0.0,0.35469448584202684,0.08497011101622545,0.65,1.0
+0.11764705882352941,0.7135678391959799,0.6721311475409836,0.18181818181818182,0.07565011820330969,0.3681073025335321,0.29163108454312553,0.0,0.0
+0.35294117647058826,0.7236180904522613,0.5901639344262295,0.2727272727272727,0.2695035460992908,0.5052160953800299,0.07557643040136634,0.31666666666666665,0.0
+0.11764705882352941,0.4623115577889447,0.5081967213114754,0.2828282828282828,0.0,0.4709388971684054,0.02220324508966695,0.05,0.0
+0.058823529411764705,0.35678391959798994,0.39344262295081966,0.18181818181818182,0.08983451536643026,0.30402384500745155,0.10461144321093083,0.016666666666666666,0.0
+0.35294117647058826,0.46733668341708545,0.4098360655737705,0.30303030303030304,0.07565011820330969,0.4277198211624441,0.11870196413321946,0.03333333333333333,0.0
+0.058823529411764705,0.6130653266331658,0.7377049180327869,0.5151515151515151,0.26004728132387706,0.7406855439642326,0.10546541417591801,0.16666666666666666,1.0
+0.058823529411764705,0.8190954773869347,0.5901639344262295,0.0,0.0,0.5812220566318927,0.48847139197267286,0.2,1.0
+0.058823529411764705,0.7587939698492462,0.4918032786885246,0.0,0.0,0.38897168405365135,0.04312553373185311,0.016666666666666666,0.0
+0.0,0.628140703517588,0.7868852459016393,0.0,0.0,0.33532041728763046,0.07856532877882151,0.0,0.0
+0.058823529411764705,0.40703517587939697,0.5901639344262295,0.18181818181818182,0.04728132387706856,0.3964232488822653,0.087532023911187,0.05,0.0
+0.11764705882352941,0.4271356783919598,0.5327868852459017,0.0,0.0,0.5901639344262296,0.36379163108454315,0.1,0.0
+0.058823529411764705,0.6331658291457286,0.45901639344262296,0.29292929292929293,0.17966903073286053,0.4277198211624441,0.30871050384286935,0.0,0.0
+0.058823529411764705,0.4824120603015075,1.0,0.0,0.0,0.33383010432190763,0.055081127241673786,0.1,0.0
+0.23529411764705882,0.7236180904522613,0.47540983606557374,0.2828282828282828,0.16548463356973994,0.4396423248882266,0.08923996584116138,0.26666666666666666,0.0
+0.17647058823529413,0.41708542713567837,0.47540983606557374,0.31313131313131315,0.02127659574468085,0.511177347242921,0.11016225448334757,0.06666666666666667,0.0
+0.0,0.47738693467336685,0.6967213114754098,0.25252525252525254,0.0425531914893617,0.5573770491803279,0.07216054654141758,0.05,1.0
+0.17647058823529413,0.8592964824120602,0.5901639344262295,0.3333333333333333,0.1595744680851064,0.496274217585693,0.051665243381725026,0.05,1.0
+0.47058823529411764,0.7788944723618091,0.5081967213114754,0.26262626262626265,0.5851063829787234,0.5067064083457526,0.19854824935952178,0.4166666666666667,1.0
+0.058823529411764705,0.4472361809045226,0.6229508196721312,0.3434343434343434,0.04373522458628842,0.46497764530551416,0.04867634500426986,0.03333333333333333,0.0
+0.23529411764705882,0.38190954773869346,0.5081967213114754,0.0,0.0,0.5067064083457526,0.1336464560204953,0.06666666666666667,0.0
+0.4117647058823529,0.8040201005025126,0.4426229508196721,0.32323232323232326,0.20685579196217493,0.4545454545454546,0.21776259607173357,0.3,1.0
+0.23529411764705882,0.7336683417085427,0.7540983606557377,0.0,0.0,0.46497764530551416,0.1968403074295474,0.6666666666666666,1.0
+0.29411764705882354,0.6231155778894473,0.6065573770491803,0.0,0.0,0.5067064083457526,0.06063193851409052,0.2833333333333333,1.0
+0.29411764705882354,0.39195979899497485,0.39344262295081966,0.0,0.0,0.5022354694485843,0.24594363791631085,0.06666666666666667,0.0
+0.23529411764705882,0.48743718592964824,0.4918032786885246,0.23232323232323232,0.0,0.42026825633383014,0.15584970111016225,0.016666666666666666,0.0
+0.23529411764705882,0.49748743718592964,0.6229508196721312,0.15151515151515152,0.06028368794326241,0.34575260804769004,0.06191289496157131,0.0,0.0
+0.0,0.8140703517587939,0.6229508196721312,0.5656565656565656,0.1182033096926714,0.7928464977645306,0.29077711357813835,0.06666666666666667,1.0
+0.35294117647058826,0.5577889447236181,0.5245901639344263,0.3939393939393939,0.0,0.5096870342771983,0.07771135781383433,0.05,0.0
+0.11764705882352941,0.5376884422110553,0.6065573770491803,0.30303030303030304,0.1182033096926714,0.5007451564828614,0.13919726729291204,0.03333333333333333,0.0
+0.29411764705882354,0.6633165829145728,0.6557377049180327,0.0,0.0,0.3994038748137109,0.04611443210930828,0.8,0.0
+0.0,0.5678391959798995,0.6229508196721312,0.0,0.0,0.496274217585693,0.08539709649871904,0.03333333333333333,1.0
+0.058823529411764705,0.44221105527638194,0.2459016393442623,0.42424242424242425,0.11702127659574468,0.819672131147541,0.1784799316823228,0.08333333333333333,1.0
+0.17647058823529413,0.6030150753768844,0.5737704918032787,0.30303030303030304,0.1595744680851064,0.639344262295082,0.1596925704526046,0.15,0.0
+0.058823529411764705,0.592964824120603,0.47540983606557374,0.36363636363636365,0.1111111111111111,0.496274217585693,0.07813834329632792,0.03333333333333333,0.0
+0.058823529411764705,0.5879396984924623,0.7213114754098361,0.24242424242424243,0.17139479905437352,0.5141579731743666,0.13877028181041845,0.31666666666666665,1.0
+0.0,0.5276381909547738,0.6885245901639344,0.0,0.0,0.4157973174366617,0.28309137489325364,0.6833333333333333,1.0
+0.23529411764705882,0.8693467336683417,0.5737704918032787,0.1414141414141414,0.19858156028368795,0.4426229508196722,0.12083689154568743,0.2,1.0
+0.5294117647058824,0.6130653266331658,0.45901639344262296,0.0,0.0,0.496274217585693,0.44235695986336465,0.2,1.0
+0.17647058823529413,0.8542713567839196,0.5245901639344263,0.37373737373737376,0.26595744680851063,0.5141579731743666,0.11870196413321946,0.15,1.0
+0.47058823529411764,0.4221105527638191,0.6065573770491803,0.31313131313131315,0.0,0.5707898658718331,0.16182749786507258,0.3,0.0
+0.11764705882352941,0.4824120603015075,0.5573770491803278,0.13131313131313133,0.057919621749408984,0.31445603576751124,0.24295473953885569,0.08333333333333333,0.0
+0.11764705882352941,0.628140703517588,0.4918032786885246,0.20202020202020202,0.16548463356973994,0.503725782414307,0.00426985482493595,0.16666666666666666,0.0
+0.0,0.5025125628140703,0.5737704918032787,0.26262626262626265,0.0591016548463357,0.459016393442623,0.22160546541417592,0.0,0.0
+0.0,0.46733668341708545,0.4918032786885246,0.25252525252525254,0.10874704491725769,0.4277198211624441,0.19385140905209222,0.016666666666666666,0.0
+0.0,0.6482412060301508,0.6557377049180327,0.0,0.0,0.46497764530551416,0.266865926558497,0.13333333333333333,0.0
+0.29411764705882354,0.5276381909547738,0.5901639344262295,0.29292929292929293,0.38416075650118203,0.5499254843517138,0.034585824081981215,0.11666666666666667,0.0
+0.17647058823529413,0.6432160804020101,0.639344262295082,0.0,0.0,0.31445603576751124,0.08112724167378309,0.5666666666666667,0.0
+0.29411764705882354,0.5326633165829145,0.6721311475409836,0.30303030303030304,0.0,0.5886736214605067,0.08881298035866779,0.2833333333333333,0.0
+0.11764705882352941,0.542713567839196,0.4262295081967213,0.26262626262626265,0.07446808510638298,0.4843517138599106,0.10247651579846284,0.016666666666666666,0.0
+0.5882352941176471,0.542713567839196,0.5409836065573771,0.0,0.0,0.4828614008941878,0.08283518360375747,0.35,1.0
+0.23529411764705882,0.7738693467336684,0.5081967213114754,0.31313131313131315,0.33569739952718675,0.488822652757079,0.06789069171648163,0.03333333333333333,0.0
+0.0,0.5125628140703518,0.6147540983606558,0.23232323232323232,0.0,0.0,0.210930828351836,0.0,0.0
+0.5294117647058824,0.2864321608040201,0.6557377049180327,0.37373737373737376,0.0,0.488822652757079,0.007685738684884714,0.3333333333333333,0.0
+0.11764705882352941,0.5326633165829145,0.5245901639344263,0.35353535353535354,0.14066193853427897,0.4545454545454546,0.5644748078565328,0.21666666666666667,0.0
+0.29411764705882354,0.7386934673366834,0.639344262295082,0.0,0.0,0.5022354694485843,0.059777967549103334,0.7333333333333333,0.0
+0.11764705882352941,0.45226130653266333,0.5737704918032787,0.1717171717171717,0.0,0.40685543964232496,0.002988898377455169,0.016666666666666666,0.0
+0.058823529411764705,0.6834170854271356,0.6065573770491803,0.5050505050505051,0.24113475177304963,0.5573770491803279,0.13706233988044406,0.05,0.0
+0.23529411764705882,0.5728643216080402,0.5327868852459017,0.0,0.0,0.3263785394932936,0.15115286080273269,0.26666666666666666,0.0
+0.5294117647058824,0.7839195979899497,0.7049180327868853,0.2828282828282828,0.18321513002364065,0.511177347242921,0.4743808710503843,0.35,1.0
+0.058823529411764705,0.7688442211055276,0.6721311475409836,0.42424242424242425,0.5732860520094563,0.6050670640834576,0.26003415883859954,0.03333333333333333,0.0
+0.47058823529411764,0.9447236180904522,0.639344262295082,0.0,0.0,0.7138599105812221,0.025192143467122122,0.36666666666666664,1.0
+0.4117647058823529,0.7638190954773869,0.7213114754098361,0.4444444444444444,0.0,0.7451564828614009,0.11058923996584116,0.25,1.0
+0.11764705882352941,0.49748743718592964,0.4262295081967213,0.15151515151515152,0.1111111111111111,0.3666169895678093,0.23868488471391974,0.0,0.0
+0.058823529411764705,0.5477386934673367,0.45901639344262296,0.21212121212121213,0.1595744680851064,0.37555886736214605,0.3223740392826644,0.03333333333333333,0.0
+0.11764705882352941,0.44221105527638194,0.6065573770491803,0.1919191919191919,0.06264775413711583,0.43219076005961254,0.06447480785653288,0.016666666666666666,0.0
+1.0,0.8190954773869347,0.5901639344262295,0.41414141414141414,0.1347517730496454,0.609538002980626,0.3155422715627669,0.43333333333333335,1.0
+0.23529411764705882,0.7587939698492462,0.7377049180327869,0.3838383838383838,0.0,0.4426229508196722,0.09222886421861655,0.25,0.0
+0.4117647058823529,0.5125628140703518,0.6065573770491803,0.40404040404040403,0.12411347517730496,0.5543964232488824,0.053800170794193,0.4,0.0
+0.0,0.5728643216080402,0.6557377049180327,0.3434343434343434,0.33687943262411346,0.6587183308494785,0.038001707941929974,0.1,0.0
+0.11764705882352941,0.5025125628140703,0.5245901639344263,0.23232323232323232,0.0,0.4426229508196722,0.1238257899231426,0.0,0.0
+0.0,0.6582914572864321,0.7213114754098361,0.0,0.0,0.4709388971684054,0.2839453458582408,0.18333333333333332,1.0
+0.35294117647058826,0.5226130653266332,0.6065573770491803,0.18181818181818182,0.18439716312056736,0.4456035767511177,0.2749786507258753,0.3333333333333333,1.0
+0.17647058823529413,0.7437185929648241,0.5409836065573771,0.25252525252525254,0.0,0.4843517138599106,0.07600341588385995,0.016666666666666666,0.0
+0.23529411764705882,0.6030150753768844,0.5573770491803278,0.0,0.0,0.4411326378539494,0.26942783945345855,0.21666666666666667,0.0
+0.23529411764705882,0.5527638190954773,0.5409836065573771,0.0,0.0,0.4754098360655738,0.1678052946199829,0.13333333333333333,0.0
+0.17647058823529413,0.5577889447236181,0.7377049180327869,0.12121212121212122,0.09219858156028368,0.42324888226527574,0.1780529461998292,0.13333333333333333,0.0
+0.35294117647058826,0.5125628140703518,0.6721311475409836,0.0,0.0,0.459016393442623,0.043552519214346705,0.25,1.0
+0.35294117647058826,0.6733668341708543,0.5737704918032787,0.23232323232323232,0.1536643026004728,0.5275707898658718,0.1981212638770282,0.13333333333333333,1.0
+0.11764705882352941,0.4371859296482412,0.0,0.23232323232323232,0.0,0.4307004470938897,0.2967549103330487,0.06666666666666667,0.0
+0.058823529411764705,0.3969849246231156,0.4918032786885246,0.42424242424242425,0.05673758865248227,0.6482861400894189,0.25619128949615716,0.03333333333333333,0.0
+0.11764705882352941,0.3768844221105528,0.5245901639344263,0.24242424242424243,0.06501182033096926,0.4426229508196722,0.12467976088812979,0.2,0.0
+0.47058823529411764,0.8994974874371859,0.5901639344262295,0.42424242424242425,0.1536643026004728,0.48733233979135626,0.2736976942783945,0.25,1.0
+0.35294117647058826,0.4271356783919598,0.639344262295082,0.0,0.0,0.46497764530551416,0.12980358667805295,0.35,0.0
+0.0,0.6482412060301508,0.9016393442622951,0.46464646464646464,0.1536643026004728,1.0,0.10290350128095645,0.08333333333333333,1.0
+0.29411764705882354,0.7185929648241206,0.639344262295082,0.0,0.0,0.6706408345752609,0.04782237403928266,0.43333333333333335,0.0
+0.29411764705882354,0.6532663316582915,0.6721311475409836,0.0,0.0,0.5827123695976155,0.3748932536293766,0.26666666666666666,1.0
+0.35294117647058826,0.4371859296482412,0.6557377049180327,0.0,0.0,0.34575260804769004,0.0025619128949615736,0.18333333333333332,0.0
+0.0,0.5979899497487438,0.5245901639344263,0.18181818181818182,0.10874704491725769,0.5201192250372578,0.2762596071733561,0.03333333333333333,0.0
+0.058823529411764705,0.0,0.6065573770491803,0.20202020202020202,0.027186761229314422,0.4128166915052161,0.09436379163108453,0.0,0.0
+0.29411764705882354,0.36683417085427134,0.4918032786885246,0.0,0.0,0.3994038748137109,0.08112724167378309,0.1,0.0
+0.23529411764705882,0.7085427135678392,0.6065573770491803,0.0,0.0,0.41132637853949333,0.0708795900939368,0.31666666666666665,0.0
+0.4117647058823529,0.9748743718592965,0.5573770491803278,0.2828282828282828,0.0,0.5350223546944859,0.284799316823228,0.3333333333333333,1.0
+0.47058823529411764,0.9095477386934674,0.5573770491803278,0.36363636363636365,0.5851063829787234,0.4485842026825634,0.22929120409906065,0.65,1.0
+0.058823529411764705,0.6432160804020101,0.8032786885245902,0.41414141414141414,0.06855791962174941,0.4769001490312966,0.5307429547395388,0.2,1.0
+0.47058823529411764,0.5477386934673367,0.6229508196721312,0.3939393939393939,0.1347517730496454,0.4157973174366617,0.23996584116140052,0.16666666666666666,1.0
+0.29411764705882354,0.6984924623115578,0.6557377049180327,0.35353535353535354,0.18912529550827423,0.4709388971684054,0.12083689154568743,0.06666666666666667,1.0
+0.17647058823529413,0.5577889447236181,0.5081967213114754,0.0,0.0,0.33681073025335323,0.027327070879590087,0.0,0.0
+0.5294117647058824,0.6180904522613065,0.5737704918032787,0.4444444444444444,0.1111111111111111,0.49329359165424747,0.1263877028181042,0.31666666666666665,0.0
+0.4117647058823529,0.7989949748743719,0.5409836065573771,0.0,0.0,0.45305514157973176,0.13023057216054654,0.25,1.0
+0.6470588235294118,0.678391959798995,0.0,0.0,0.0,0.7794336810730254,0.21349274124679757,0.31666666666666665,1.0
+0.47058823529411764,0.4271356783919598,0.45081967213114754,0.20202020202020202,0.0,0.36363636363636365,0.024765157984628527,0.35,0.0
+0.29411764705882354,0.7939698492462312,0.6885245901639344,0.41414141414141414,0.24822695035460993,0.587183308494784,0.1353543979504697,0.13333333333333333,1.0
+0.058823529411764705,0.5276381909547738,0.47540983606557374,0.0,0.0,0.3621460506706409,0.04654141759180188,0.0,0.0
+0.17647058823529413,0.5376884422110553,0.5081967213114754,0.13131313131313133,0.05673758865248227,0.3412816691505216,0.25619128949615716,0.03333333333333333,1.0
+0.23529411764705882,0.5477386934673367,0.5245901639344263,0.4444444444444444,0.11702127659574468,0.518628912071535,0.35311699402220326,0.08333333333333333,1.0
+0.23529411764705882,0.7437185929648241,0.4918032786885246,0.2727272727272727,0.375886524822695,0.4605067064083458,0.030742954739538853,0.13333333333333333,1.0
+0.0,0.5678391959798995,0.6557377049180327,0.16161616161616163,0.0,0.4619970193740686,0.3398804440649018,0.0,0.0
+0.058823529411764705,0.6934673366834171,0.6721311475409836,0.0,0.0,0.5976154992548436,0.06746370623398804,0.11666666666666667,0.0
+0.0,0.542713567839196,0.5573770491803278,0.20202020202020202,0.0,0.40685543964232496,0.302732707087959,0.18333333333333332,0.0
+0.11764705882352941,0.49748743718592964,0.5737704918032787,0.16161616161616163,0.05200945626477541,0.30402384500745155,0.06703672075149443,0.1,0.0
+0.35294117647058826,0.5175879396984925,0.5901639344262295,0.32323232323232326,0.22458628841607564,0.5618479880774964,0.10503842869342442,0.5666666666666667,0.0
+0.29411764705882354,0.5577889447236181,0.5901639344262295,0.2828282828282828,0.0,0.3561847988077496,0.14047822374039282,0.1,0.0
+0.47058823529411764,0.9849246231155779,0.6229508196721312,0.29292929292929293,0.3309692671394799,0.5588673621460507,0.22502134927412468,0.6,1.0
+0.29411764705882354,0.8140703517587939,0.8524590163934426,0.0,0.0,0.5618479880774964,0.031169940222032448,0.5166666666666667,1.0
+0.058823529411764705,0.4824120603015075,0.5245901639344263,0.2727272727272727,0.10283687943262411,0.4947839046199703,0.09009393680614858,0.0,0.0
+0.4117647058823529,0.9246231155778895,0.6885245901639344,0.3333333333333333,0.0,0.5290611028315947,0.11827497865072585,0.3333333333333333,1.0
+0.11764705882352941,0.40703517587939697,0.4918032786885246,0.2222222222222222,0.0,0.4128166915052161,0.09052092228864217,0.06666666666666667,0.0
+0.0,0.7386934673366834,0.6967213114754098,0.5454545454545454,0.0,0.6378539493293591,0.12681468830059778,0.05,0.0
+0.4117647058823529,0.8994974874371859,0.7786885245901639,0.31313131313131315,0.0,0.5096870342771983,0.03672075149444919,0.65,0.0
+0.0,0.7035175879396985,0.5327868852459017,0.26262626262626265,0.1536643026004728,0.6348733233979137,0.1507258753202391,0.05,1.0
+0.5294117647058824,0.5628140703517588,0.6721311475409836,0.32323232323232326,0.20685579196217493,0.5096870342771983,0.07771135781383433,0.25,1.0
+0.7058823529411765,0.7587939698492462,0.5737704918032787,0.40404040404040403,0.3203309692671395,0.6229508196721312,0.28351836037574724,0.2833333333333333,1.0
+0.29411764705882354,0.5477386934673367,0.5081967213114754,0.41414141414141414,0.1524822695035461,0.533532041728763,0.18616567036720752,0.06666666666666667,1.0
+0.35294117647058826,0.628140703517588,0.5573770491803278,0.30303030303030304,0.14184397163120568,0.44709388971684055,0.16481639624252775,0.18333333333333332,0.0
+0.29411764705882354,0.4271356783919598,0.6065573770491803,0.2222222222222222,0.0,0.43219076005961254,0.48932536293766005,0.18333333333333332,1.0
+0.29411764705882354,0.5628140703517588,0.5409836065573771,0.0,0.0,0.5633383010432191,0.07813834329632792,0.3333333333333333,1.0
+0.0,0.8894472361809045,0.4918032786885246,0.29292929292929293,0.5650118203309693,0.5156482861400895,0.4244235695986337,0.0,1.0
+0.11764705882352941,0.7939698492462312,0.7377049180327869,0.0,0.0,0.4709388971684054,0.3104184457728438,0.75,1.0
+0.4117647058823529,0.5979899497487438,0.0,0.0,0.0,0.37555886736214605,0.055935098206660976,0.26666666666666666,0.0
+0.4117647058823529,0.7135678391959799,0.4918032786885246,0.3333333333333333,0.22458628841607564,0.42921013412816694,0.26003415883859954,0.6666666666666666,0.0
+0.058823529411764705,0.5025125628140703,0.5409836065573771,0.15151515151515152,0.06619385342789598,0.3517138599105813,0.251067463706234,0.08333333333333333,0.0
+0.058823529411764705,0.4371859296482412,0.639344262295082,0.2727272727272727,0.037825059101654845,0.5156482861400895,0.009820666097352692,0.016666666666666666,0.0
+0.0,0.507537688442211,0.6229508196721312,0.0,0.0,0.5320417287630403,0.05123825789923143,0.08333333333333333,0.0
+0.17647058823529413,0.8140703517587939,0.4262295081967213,0.3838383838383838,0.0,0.5543964232488824,0.24508966695132367,0.05,1.0
+0.23529411764705882,0.9899497487437185,0.5737704918032787,0.3939393939393939,0.8794326241134752,0.5469448584202683,0.9611443210930829,0.16666666666666666,0.0
+0.0,0.5879396984924623,0.6557377049180327,0.31313131313131315,0.06264775413711583,0.6736214605067065,0.004696840307429545,0.05,0.0
+0.23529411764705882,0.7135678391959799,0.7049180327868853,0.0,0.0,0.6557377049180328,0.2421007685738685,0.016666666666666666,1.0
+0.35294117647058826,0.6733668341708543,0.6557377049180327,0.37373737373737376,0.4373522458628842,0.6885245901639345,0.06831767719897522,0.4166666666666667,1.0
+0.058823529411764705,0.3969849246231156,0.6557377049180327,0.25252525252525254,0.04373522458628842,0.37853949329359166,0.21562766865926558,0.016666666666666666,0.0
+0.23529411764705882,0.6130653266331658,0.5573770491803278,0.0,0.0,0.5216095380029807,0.1349274124679761,0.13333333333333333,0.0
+0.17647058823529413,0.37185929648241206,0.5573770491803278,0.2828282828282828,0.05319148936170213,0.4426229508196722,0.09180187873612296,0.03333333333333333,0.0
+0.23529411764705882,0.8592964824120602,0.5901639344262295,0.0,0.0,0.6497764530551416,0.17122117847993165,0.08333333333333333,1.0
+0.4117647058823529,0.9095477386934674,0.6885245901639344,0.21212121212121213,0.22695035460992907,0.5350223546944859,0.21690862510674636,0.5,1.0
+0.0,0.8994974874371859,0.7377049180327869,0.2727272727272727,0.0,0.6572280178837556,0.25960717335610595,0.03333333333333333,1.0
+0.5294117647058824,0.8241206030150754,0.6885245901639344,0.21212121212121213,0.0,0.459016393442623,0.3215200683176772,0.18333333333333332,1.0
+0.0,0.5226130653266332,0.6229508196721312,0.0,0.0,0.27421758569299554,0.215200683176772,0.1,0.0
+0.058823529411764705,0.457286432160804,0.5245901639344263,0.24242424242424243,0.0,0.43517138599105815,0.04867634500426986,0.0,0.0
+0.23529411764705882,0.457286432160804,0.5737704918032787,0.32323232323232326,0.10401891252955082,0.49329359165424747,0.15713065755764302,0.016666666666666666,0.0
+0.17647058823529413,0.6984924623115578,0.4426229508196721,0.0,0.0,0.3815201192250373,0.13834329632792486,0.016666666666666666,1.0
+0.35294117647058826,0.5979899497487438,0.4098360655737705,0.2222222222222222,0.20803782505910165,0.40387481371087935,0.5294619982920581,0.2,1.0
+0.11764705882352941,0.7336683417085427,0.6229508196721312,0.35353535353535354,0.2293144208037825,0.5692995529061103,0.10717335610589239,0.13333333333333333,0.0
+0.5294117647058824,0.9246231155778895,0.6967213114754098,0.15151515151515152,0.0,0.44709388971684055,0.48462852263023054,0.4666666666666667,1.0
+0.5882352941176471,0.6130653266331658,0.5573770491803278,0.0,0.0,0.46497764530551416,0.07685738684884713,0.3333333333333333,0.0
+0.0,0.8291457286432161,0.7377049180327869,0.3333333333333333,0.8037825059101655,0.7794336810730254,0.14901793339026473,0.03333333333333333,0.0
+0.5294117647058824,0.6231155778894473,0.5737704918032787,0.3333333333333333,0.475177304964539,0.5275707898658718,0.08710503842869341,0.21666666666666667,0.0
+0.058823529411764705,0.5577889447236181,0.7049180327868853,0.1919191919191919,0.0,0.4485842026825634,0.02775405636208368,0.03333333333333333,0.0
+0.5294117647058824,0.5326633165829145,0.4262295081967213,0.0,0.0,0.46497764530551416,0.12894961571306574,0.35,0.0
+0.11764705882352941,0.6482412060301508,0.6885245901639344,0.0,0.0,0.41728763040238454,0.08795900939368059,0.1,0.0
+0.11764705882352941,0.45226130653266333,0.6557377049180327,0.1414141414141414,0.06501182033096926,0.36363636363636365,0.07301451750640478,0.05,0.0
+0.0,0.4321608040201005,0.5573770491803278,0.32323232323232326,0.0,0.533532041728763,0.06831767719897522,0.06666666666666667,0.0
+0.7058823529411765,0.4623115577889447,0.5081967213114754,0.0707070707070707,0.3049645390070922,0.41132637853949333,0.3620836891545688,0.38333333333333336,1.0
+0.058823529411764705,0.5678391959798995,0.5245901639344263,0.35353535353535354,0.0,0.5007451564828614,0.19854824935952178,0.0,1.0
+0.17647058823529413,0.5577889447236181,0.45901639344262296,0.3939393939393939,0.0,0.4485842026825634,0.20452604611443212,0.15,0.0
+0.11764705882352941,0.5728643216080402,0.5573770491803278,0.2222222222222222,0.0,0.4277198211624441,0.005977796754910332,0.06666666666666667,0.0
+0.058823529411764705,0.9698492462311558,0.4098360655737705,0.16161616161616163,0.4432624113475177,0.3859910581222057,0.24637062339880447,0.05,0.0
+0.6470588235294118,0.7788944723618091,0.6229508196721312,0.2828282828282828,0.1773049645390071,0.496274217585693,0.5444064901793338,0.5,1.0
+0.17647058823529413,0.9597989949748744,0.5573770491803278,0.15151515151515152,0.1536643026004728,0.4605067064083458,0.09436379163108453,0.21666666666666667,0.0
+0.17647058823529413,0.7085427135678392,0.0,0.0,0.0,0.44709388971684055,0.29163108454312553,0.1,1.0
+0.23529411764705882,0.47738693467336685,0.5737704918032787,0.32323232323232326,0.0,0.47839046199701946,0.22801024765157984,0.05,0.0
+0.17647058823529413,0.7135678391959799,0.6557377049180327,0.15151515151515152,0.0,0.4828614008941878,0.05209222886421862,0.7,0.0
+0.23529411764705882,0.6180904522613065,0.5081967213114754,0.0,0.0,0.4769001490312966,0.0631938514090521,0.23333333333333334,1.0
+0.29411764705882354,0.4824120603015075,0.6065573770491803,0.18181818181818182,0.07919621749408984,0.5007451564828614,0.392399658411614,0.36666666666666664,0.0
+0.0,0.6934673366834171,0.0,0.0,0.0,0.5409836065573771,0.36507258753202393,0.06666666666666667,1.0
+0.11764705882352941,0.6432160804020101,0.5245901639344263,0.42424242424242425,0.0,0.5961251862891208,0.43680614859094785,0.05,0.0
+0.0,0.5125628140703518,0.4262295081967213,0.0,0.0,0.3740685543964233,0.0,0.0,0.0
+0.11764705882352941,0.7336683417085427,0.0,0.0,0.0,0.4098360655737705,0.06917164816396242,0.11666666666666667,1.0
+0.5882352941176471,0.507537688442211,0.7049180327868853,0.37373737373737376,0.0,0.6795827123695977,0.45175064047822366,0.2833333333333333,1.0
+0.11764705882352941,0.542713567839196,0.5081967213114754,0.32323232323232326,0.06619385342789598,0.37555886736214605,0.02134927412467976,0.0,0.0
+0.17647058823529413,0.6130653266331658,0.639344262295082,0.0,0.0,0.34277198211624443,0.07514944491887275,0.31666666666666665,0.0
+0.058823529411764705,0.35678391959798994,0.639344262295082,0.5050505050505051,0.05319148936170213,0.4947839046199703,0.14688300597779674,0.0,0.0
+0.7647058823529411,0.5326633165829145,0.5737704918032787,0.0,0.0,0.5096870342771983,0.07386848847139196,0.5166666666666667,0.0
+0.11764705882352941,0.5025125628140703,0.5737704918032787,0.5252525252525253,0.0673758865248227,0.6035767511177348,0.25576430401366357,0.06666666666666667,0.0
+0.4117647058823529,0.5326633165829145,0.4918032786885246,0.24242424242424243,0.0,0.3949329359165425,0.09308283518360375,0.13333333333333333,1.0
+0.0,0.5226130653266332,0.5245901639344263,0.23232323232323232,0.13711583924349882,0.41430700447093893,0.1605465414175918,0.03333333333333333,0.0
+0.29411764705882354,0.5728643216080402,0.6065573770491803,0.0,0.0,0.3710879284649777,0.2843723313407344,0.6,0.0
+0.11764705882352941,0.542713567839196,0.5081967213114754,0.10101010101010101,0.32860520094562645,0.3770491803278689,0.34286934244235695,0.016666666666666666,0.0
+0.0,0.7336683417085427,0.5737704918032787,0.0,0.0,0.5648286140089419,0.10930828351836037,0.11666666666666667,1.0
+0.5882352941176471,0.6482412060301508,0.6229508196721312,0.2828282828282828,0.14420803782505912,0.5350223546944859,0.08625106746370624,0.3,0.0
+0.4117647058823529,0.6683417085427136,0.7213114754098361,0.15151515151515152,0.18321513002364065,0.4828614008941878,0.07856532877882151,0.26666666666666666,0.0
+0.4117647058823529,0.8090452261306532,0.7049180327868853,0.0,0.0,0.45305514157973176,0.037147736976942784,0.43333333333333335,1.0
+0.11764705882352941,0.542713567839196,0.6557377049180327,0.0,0.0,0.4023845007451565,0.07728437233134072,0.5166666666666667,1.0
+0.4117647058823529,0.6834170854271356,0.6065573770491803,0.26262626262626265,0.1595744680851064,0.3874813710879285,0.24295473953885569,0.5,0.0
+0.29411764705882354,0.7788944723618091,0.6885245901639344,0.4444444444444444,0.6442080378250591,0.5767511177347244,0.230999146029035,0.21666666666666667,0.0
+0.058823529411764705,0.5979899497487438,0.7049180327868853,0.3939393939393939,0.26004728132387706,0.6795827123695977,0.31169940222032455,0.13333333333333333,1.0
+0.23529411764705882,0.4824120603015075,0.45901639344262296,0.1717171717171717,0.057919621749408984,0.3099850968703428,0.11187019641332195,0.08333333333333333,0.0
+0.29411764705882354,0.542713567839196,0.5901639344262295,0.43434343434343436,0.08865248226950355,0.5380029806259315,0.07899231426131512,0.2,0.0
+0.0,0.39195979899497485,0.7213114754098361,0.29292929292929293,0.04728132387706856,0.5499254843517138,0.1520068317677199,0.0,0.0
+0.0,0.5376884422110553,0.5081967213114754,0.30303030303030304,0.08747044917257683,0.5454545454545455,0.28992314261315116,0.06666666666666667,1.0
+0.11764705882352941,0.6432160804020101,0.639344262295082,0.37373737373737376,0.21513002364066194,0.6453055141579732,0.48932536293766005,0.16666666666666666,1.0
+0.058823529411764705,0.6432160804020101,0.39344262295081966,0.45454545454545453,0.2293144208037825,0.6035767511177348,0.22843723313407344,0.05,1.0
+0.0,0.8090452261306532,0.4098360655737705,0.0,0.0,0.3263785394932936,0.07514944491887275,0.7333333333333333,0.0
+0.35294117647058826,0.7587939698492462,0.5081967213114754,0.31313131313131315,0.14184397163120568,0.5290611028315947,0.26216908625106744,0.11666666666666667,0.0
+0.11764705882352941,0.7336683417085427,0.5737704918032787,0.3838383838383838,0.425531914893617,0.41728763040238454,0.11058923996584116,0.13333333333333333,1.0
+0.0,0.6331658291457286,0.6885245901639344,0.29292929292929293,0.2541371158392435,0.4575260804769002,0.18872758326216907,0.05,0.0
+0.8235294117647058,0.5025125628140703,0.639344262295082,0.25252525252525254,0.21749408983451538,0.5454545454545455,0.14261315115286077,0.4166666666666667,1.0
+0.47058823529411764,0.5628140703517588,0.5901639344262295,0.0,0.0,0.3517138599105813,0.32536293766011953,0.6166666666666667,0.0
+0.0,0.8391959798994975,0.0,0.0,0.0,0.481371087928465,0.32493595217762594,0.15,1.0
+0.11764705882352941,0.7236180904522613,0.47540983606557374,0.3333333333333333,0.1595744680851064,0.4709388971684054,0.14688300597779674,0.06666666666666667,1.0
+0.29411764705882354,0.3869346733668342,0.6721311475409836,0.41414141414141414,0.04964539007092199,0.533532041728763,0.033304867634500426,0.23333333333333334,0.0
+0.29411764705882354,0.5778894472361809,0.8032786885245902,0.0,0.0,0.7883755588673622,0.055935098206660976,0.11666666666666667,1.0
+0.17647058823529413,0.7537688442211056,0.6229508196721312,0.0,0.0,0.3129657228017884,0.055081127241673786,0.26666666666666666,0.0
+0.11764705882352941,0.6030150753768844,0.6229508196721312,0.37373737373737376,0.12411347517730496,0.5916542473919524,0.058497011101622545,0.13333333333333333,0.0
+0.5882352941176471,0.8090452261306532,0.5573770491803278,0.23232323232323232,0.15602836879432624,0.3800298062593145,0.10589239965841162,0.43333333333333335,1.0
+0.0,0.6884422110552764,0.5573770491803278,0.1414141414141414,0.17494089834515367,0.3695976154992549,0.02775405636208368,0.0,0.0
+0.0,0.6432160804020101,0.5573770491803278,0.1919191919191919,0.2127659574468085,0.4545454545454546,0.5606319385140904,0.06666666666666667,1.0
+0.11764705882352941,0.6231155778894473,0.5573770491803278,0.2828282828282828,0.24231678486997635,0.4903129657228018,0.3403074295473954,0.15,1.0
+0.35294117647058826,0.4020100502512563,0.5409836065573771,0.30303030303030304,0.0,0.3904619970193741,0.10034158838599487,0.3333333333333333,0.0
+0.0,0.5326633165829145,0.5737704918032787,0.37373737373737376,0.17494089834515367,0.587183308494784,0.22502134927412468,0.016666666666666666,0.0
+0.11764705882352941,0.7788944723618091,0.6065573770491803,0.1717171717171717,0.11347517730496454,0.3964232488822653,0.15157984628522628,0.1,1.0
+0.17647058823529413,0.5678391959798995,0.4098360655737705,0.10101010101010101,0.10047281323877069,0.4396423248882266,0.23398804440649018,0.06666666666666667,0.0
+0.4117647058823529,0.5477386934673367,0.6557377049180327,0.31313131313131315,0.0,0.5350223546944859,0.44790777113578134,0.36666666666666664,1.0
+0.11764705882352941,0.5628140703517588,0.5573770491803278,0.2222222222222222,0.1111111111111111,0.5081967213114754,0.10119555935098205,0.08333333333333333,0.0
+0.17647058823529413,0.49748743718592964,0.6557377049180327,0.1111111111111111,0.07565011820330969,0.28763040238450077,0.08795900939368059,0.15,0.0
+0.17647058823529413,0.914572864321608,0.6065573770491803,0.0,0.0,0.4545454545454546,0.11400512382578991,0.13333333333333333,1.0
+0.17647058823529413,0.5778894472361809,0.5409836065573771,0.3939393939393939,0.16548463356973994,0.5678092399403876,0.030742954739538853,0.11666666666666667,0.0
+0.35294117647058826,0.9748743718592965,0.639344262295082,0.0,0.0,0.35022354694485847,0.021776259607173356,0.6333333333333333,1.0
+0.23529411764705882,0.6482412060301508,0.4918032786885246,0.12121212121212122,0.2730496453900709,0.4098360655737705,0.19171648163962424,0.16666666666666666,0.0
+0.17647058823529413,0.5628140703517588,0.6065573770491803,0.30303030303030304,0.0,0.4709388971684054,0.05081127241673783,0.06666666666666667,1.0
+0.0,0.6231155778894473,0.5737704918032787,0.20202020202020202,0.0,0.40834575260804773,0.07514944491887275,0.25,1.0
+0.7647058823529411,0.7638190954773869,0.7377049180327869,0.3333333333333333,0.034278959810874705,0.3994038748137109,0.2788215200683177,0.36666666666666664,1.0
+0.11764705882352941,0.5628140703517588,0.6147540983606558,0.32323232323232326,0.0,0.5320417287630403,0.02988898377455166,0.0,0.0
+0.058823529411764705,0.7889447236180904,0.5901639344262295,0.21212121212121213,0.19858156028368795,0.3815201192250373,0.019214346712211783,0.05,0.0
+0.058823529411764705,0.6130653266331658,0.5245901639344263,0.32323232323232326,0.18439716312056736,0.5230998509687035,0.26216908625106744,0.15,1.0
+0.5882352941176471,0.8994974874371859,0.5737704918032787,0.0,0.0,0.5230998509687035,0.05209222886421862,0.26666666666666666,0.0
+0.11764705882352941,0.5125628140703518,0.7049180327868853,0.36363636363636365,0.14184397163120568,0.6780923994038749,0.020922288642186166,0.03333333333333333,1.0
+0.35294117647058826,0.5276381909547738,0.5737704918032787,0.32323232323232326,0.08037825059101655,0.459016393442623,0.018787361229718188,0.26666666666666666,0.0
+0.47058823529411764,0.592964824120603,0.5901639344262295,0.1919191919191919,0.0,0.34426229508196726,0.5969257045260461,0.4166666666666667,0.0
+0.11764705882352941,0.4371859296482412,0.47540983606557374,0.16161616161616163,0.061465721040189124,0.48733233979135626,0.03757472245943638,0.06666666666666667,0.0
+0.058823529411764705,0.9045226130653267,0.0,0.0,0.0,0.6453055141579732,0.08710503842869341,0.3333333333333333,1.0
+0.7058823529411765,0.5326633165829145,0.6557377049180327,0.0,0.0,0.3517138599105813,0.025192143467122122,0.38333333333333336,0.0
+0.058823529411764705,0.47738693467336685,0.4918032786885246,0.18181818181818182,0.06855791962174941,0.3561847988077496,0.07771135781383433,0.016666666666666666,0.0
+0.0,0.8291457286432161,0.6229508196721312,0.43434343434343436,0.30141843971631205,0.7138599105812221,0.07728437233134072,0.08333333333333333,0.0
+0.0,0.5879396984924623,0.0,0.0,0.0,0.503725782414307,0.36464560204953034,0.38333333333333336,0.0
+0.29411764705882354,0.5778894472361809,0.6229508196721312,0.0,0.0,0.46497764530551416,0.11315115286080274,0.38333333333333336,1.0
+0.5294117647058824,0.7638190954773869,0.639344262295082,0.3434343434343434,0.20212765957446807,0.5096870342771983,0.3479931682322801,0.2,1.0
+0.4117647058823529,0.8944723618090452,0.6885245901639344,0.0,0.0,0.5946348733233979,0.10802732707087959,0.3333333333333333,1.0
+0.058823529411764705,0.6532663316582915,0.5737704918032787,0.13131313131313133,0.12411347517730496,0.3859910581222057,0.16823228010247648,0.016666666666666666,0.0
+0.058823529411764705,0.47738693467336685,0.6065573770491803,0.21212121212121213,0.08628841607565012,0.3859910581222057,0.2540563620836892,0.25,0.0
+0.058823529411764705,0.0,0.5573770491803278,0.35353535353535354,0.0,0.4769001490312966,0.1327924850555081,0.016666666666666666,0.0
+0.29411764705882354,0.6130653266331658,0.7049180327868853,0.0,0.0,0.5171385991058123,0.09052092228864217,0.2,0.0
+0.47058823529411764,0.47738693467336685,0.5901639344262295,0.0,0.0,0.5484351713859911,0.17378309137489323,0.6,0.0
+0.47058823529411764,0.6331658291457286,0.7213114754098361,0.36363636363636365,0.1276595744680851,0.5737704918032788,0.11571306575576429,0.4666666666666667,0.0
+0.058823529411764705,0.6984924623115578,0.3770491803278688,0.1919191919191919,0.09810874704491726,0.4277198211624441,0.24594363791631085,0.016666666666666666,0.0
+0.17647058823529413,0.5829145728643216,0.0,0.0,0.0,0.35022354694485847,0.04654141759180188,0.03333333333333333,0.0
+0.17647058823529413,0.49748743718592964,0.5081967213114754,0.1919191919191919,0.08747044917257683,0.3248882265275708,0.08582408198121264,0.08333333333333333,0.0
+0.29411764705882354,0.0,0.6557377049180327,0.32323232323232326,0.0,0.6110283159463488,0.1144321093082835,0.26666666666666666,1.0
+0.23529411764705882,0.4623115577889447,0.6557377049180327,0.0,0.0,0.6289120715350225,0.06789069171648163,0.13333333333333333,0.0
+0.23529411764705882,0.6884422110552764,0.6885245901639344,0.0,0.0,0.46497764530551416,0.07429547395388555,0.15,0.0
+0.17647058823529413,0.3065326633165829,0.6721311475409836,0.2828282828282828,0.0,0.5126676602086438,0.0704526046114432,0.4166666666666667,0.0
+0.058823529411764705,0.45226130653266333,0.5081967213114754,0.12121212121212122,0.0508274231678487,0.4053651266766021,0.21434671221178478,0.05,0.0
+0.17647058823529413,0.45226130653266333,0.639344262295082,0.0,0.0,0.6363636363636365,0.2053800170794193,0.0,0.0
+0.5294117647058824,0.8291457286432161,0.7213114754098361,0.0,0.0,0.45305514157973176,0.09564474807856531,0.4666666666666667,1.0
+0.058823529411764705,0.628140703517588,0.4098360655737705,0.40404040404040403,0.19739952718676124,0.496274217585693,0.37745516652433814,0.11666666666666667,1.0
+0.7647058823529411,0.6482412060301508,0.0,0.30303030303030304,0.0,0.5946348733233979,0.20964987190435522,0.38333333333333336,1.0
+0.7058823529411765,0.44221105527638194,0.6065573770491803,0.40404040404040403,0.06382978723404255,0.526080476900149,0.12809564474807855,0.45,0.0
+0.058823529411764705,0.9849246231155779,0.6229508196721312,0.36363636363636365,0.29432624113475175,0.5439642324888228,0.3403074295473954,0.13333333333333333,1.0
+0.29411764705882354,0.949748743718593,0.5245901639344263,0.3333333333333333,0.38416075650118203,0.46497764530551416,0.21562766865926558,0.13333333333333333,1.0
+0.29411764705882354,0.7939698492462312,0.5737704918032787,0.0,0.0,0.444113263785395,0.055081127241673786,0.7,0.0
+0.29411764705882354,0.5175879396984925,0.8852459016393442,0.37373737373737376,0.0,0.5842026825633384,0.0969257045260461,0.7333333333333333,0.0
+0.23529411764705882,0.7336683417085427,0.639344262295082,0.0,0.0,0.5737704918032788,0.18872758326216907,0.7666666666666667,1.0
+0.23529411764705882,0.7386934673366834,0.6065573770491803,0.25252525252525254,0.3463356973995272,0.5201192250372578,0.13108454312553372,0.15,0.0
+0.29411764705882354,0.49748743718592964,0.4426229508196721,0.2828282828282828,0.09810874704491726,0.5067064083457526,0.17976088812980356,0.15,0.0
+0.35294117647058826,0.6231155778894473,0.5901639344262295,0.0,0.0,0.41132637853949333,0.1238257899231426,0.13333333333333333,1.0
+0.0,0.507537688442211,0.5245901639344263,0.1717171717171717,0.0,0.3129657228017884,0.07429547395388555,0.0,0.0
+0.17647058823529413,0.40703517587939697,0.7049180327868853,0.16161616161616163,0.07801418439716312,0.4098360655737705,0.0973526900085397,0.016666666666666666,0.0
+0.058823529411764705,0.6683417085427136,0.8360655737704918,0.2828282828282828,0.16548463356973994,0.488822652757079,0.06660973526900087,0.4,1.0
+0.17647058823529413,0.8693467336683417,0.6721311475409836,0.48484848484848486,0.549645390070922,0.5722801788375559,0.8791631084543126,0.06666666666666667,1.0
+0.0,0.592964824120603,0.5245901639344263,0.23232323232323232,0.10520094562647754,0.0,0.7058070025619129,0.0,0.0
+0.0,0.4221105527638191,0.5245901639344263,0.2222222222222222,0.07801418439716312,0.533532041728763,0.19940222032450897,0.0,0.0
+0.11764705882352941,0.5276381909547738,0.47540983606557374,0.40404040404040403,0.1111111111111111,0.5201192250372578,0.0627668659265585,0.06666666666666667,0.0
+0.11764705882352941,0.6130653266331658,0.4262295081967213,0.43434343434343436,0.1867612293144208,0.5394932935916543,0.3151152860802733,0.11666666666666667,0.0
+0.7058823529411765,0.7035175879396985,0.6721311475409836,0.43434343434343436,0.38416075650118203,0.5842026825633384,0.19214346712211786,0.6166666666666667,1.0
+0.0,0.49246231155778897,0.6721311475409836,0.15151515151515152,0.09929078014184398,0.37555886736214605,0.09436379163108453,0.016666666666666666,0.0
+0.058823529411764705,0.4371859296482412,0.4918032786885246,0.37373737373737376,0.08865248226950355,0.5543964232488824,0.18403074295473953,0.016666666666666666,0.0
+0.23529411764705882,0.7839195979899497,0.6147540983606558,0.0,0.0,0.7198211624441133,0.06831767719897522,0.18333333333333332,1.0
+0.0,0.46733668341708545,0.819672131147541,0.3939393939393939,0.0851063829787234,0.646795827123696,0.40264730999146026,0.23333333333333334,0.0
+0.058823529411764705,0.5376884422110553,0.5901639344262295,0.30303030303030304,0.09692671394799054,0.459016393442623,0.31725021349274124,0.05,0.0
+0.0,0.5276381909547738,0.5573770491803278,0.2222222222222222,0.0,0.2980625931445604,0.06746370623398804,0.016666666666666666,0.0
+0.058823529411764705,0.5477386934673367,0.4918032786885246,0.08080808080808081,0.21513002364066194,0.37853949329359166,0.3710503842869342,0.0,0.0
+0.058823529411764705,0.45226130653266333,0.5081967213114754,0.18181818181818182,0.06973995271867613,0.3740685543964233,0.5081127241673783,0.06666666666666667,0.0
+0.058823529411764705,0.628140703517588,0.5737704918032787,0.24242424242424243,0.13002364066193853,0.3621460506706409,0.06105892399658412,0.06666666666666667,0.0
+0.058823529411764705,0.5979899497487438,0.4426229508196721,0.13131313131313133,0.0591016548463357,0.33233979135618485,0.05422715627668659,0.05,0.0
+0.29411764705882354,0.5829145728643216,0.6065573770491803,0.29292929292929293,0.0,0.481371087928465,0.24850555081127243,0.23333333333333334,1.0
+0.47058823529411764,0.5276381909547738,0.819672131147541,0.36363636363636365,0.0,0.6453055141579732,0.06874466268146882,0.4,1.0
+0.29411764705882354,0.7236180904522613,0.6721311475409836,0.26262626262626265,0.33687943262411346,0.4769001490312966,0.1596925704526046,0.6166666666666667,1.0
+0.17647058823529413,0.5025125628140703,0.5573770491803278,0.23232323232323232,0.09574468085106383,0.4709388971684054,0.3719043552519214,0.11666666666666667,0.0
+0.058823529411764705,0.5025125628140703,0.5409836065573771,0.29292929292929293,0.23167848699763594,0.4769001490312966,0.15627668659265584,0.35,0.0
+0.29411764705882354,0.8341708542713567,0.6229508196721312,0.0,0.0,0.6810730253353205,0.11187019641332195,0.1,1.0
+0.058823529411764705,0.6582914572864321,0.5245901639344263,0.1414141414141414,0.4905437352245863,0.35320417287630407,0.1327924850555081,0.0,0.0
+0.23529411764705882,0.5829145728643216,0.5901639344262295,0.12121212121212122,0.10283687943262411,0.32935916542473925,0.16438941076003416,0.26666666666666666,0.0
+0.23529411764705882,0.7939698492462312,0.639344262295082,0.0,0.0,0.4903129657228018,0.30956447480785654,0.16666666666666666,1.0
+0.11764705882352941,0.6381909547738693,0.47540983606557374,0.24242424242424243,0.32505910165484636,0.4128166915052161,0.6498719043552519,0.06666666666666667,0.0
+0.17647058823529413,0.4824120603015075,0.45901639344262296,0.3434343434343434,0.1359338061465721,0.3681073025335321,0.36976942783945344,0.3,0.0
+0.0,0.6582914572864321,0.5409836065573771,0.40404040404040403,0.0,0.511177347242921,0.05038428693424424,0.016666666666666666,1.0
+0.17647058823529413,0.4120603015075377,0.5737704918032787,0.0,0.0,0.31445603576751124,0.1327924850555081,0.06666666666666667,0.0
+0.17647058823529413,0.9698492462311558,0.5737704918032787,0.31313131313131315,0.0,0.5201192250372578,0.069598633646456,0.06666666666666667,1.0
+0.23529411764705882,0.47738693467336685,0.5245901639344263,0.0,0.0,0.4769001490312966,0.035439795046968404,0.16666666666666666,1.0
+0.35294117647058826,0.6884422110552764,0.5,0.0,0.0,0.36065573770491804,0.031169940222032448,0.5666666666666667,0.0
+0.29411764705882354,0.6834170854271356,0.6885245901639344,0.41414141414141414,0.10401891252955082,0.5216095380029807,0.08881298035866779,0.23333333333333334,1.0
+0.5294117647058824,0.36180904522613067,0.639344262295082,0.25252525252525254,0.0,0.4709388971684054,0.08625106746370624,0.2833333333333333,0.0
+0.29411764705882354,0.8442211055276382,0.5245901639344263,0.0,0.0,0.4903129657228018,0.02433817250213493,0.3333333333333333,1.0
+0.11764705882352941,0.6180904522613065,0.39344262295081966,0.32323232323232326,0.1950354609929078,0.6274217585692996,0.18872758326216907,0.08333333333333333,0.0
+0.23529411764705882,0.5778894472361809,0.5901639344262295,0.0,0.0,0.4307004470938897,0.12724167378309137,0.4166666666666667,1.0
+0.0,0.507537688442211,0.5081967213114754,0.0,0.0,0.3263785394932936,0.11016225448334757,0.06666666666666667,0.0
+0.47058823529411764,0.9899497487437185,0.6065573770491803,0.0,0.0,0.3859910581222057,0.4752348420153715,0.3,1.0
+0.058823529411764705,0.864321608040201,0.5573770491803278,0.494949494949495,0.6843971631205674,0.631892697466468,0.2664389410760034,0.11666666666666667,1.0
+0.35294117647058826,0.5125628140703518,0.7377049180327869,0.3939393939393939,0.0,0.5320417287630403,0.2544833475661828,0.11666666666666667,0.0
+0.058823529411764705,0.5628140703517588,0.5901639344262295,0.30303030303030304,0.20803782505910165,0.5126676602086438,0.19214346712211786,0.06666666666666667,0.0
+0.058823529411764705,0.7185929648241206,0.6885245901639344,0.23232323232323232,0.3664302600472813,0.631892697466468,0.42613151152860806,0.016666666666666666,0.0
+0.058823529411764705,0.7185929648241206,0.6065573770491803,0.2222222222222222,0.07210401891252956,0.3904619970193741,0.07600341588385995,0.0,0.0
+0.0,0.6934673366834171,0.4918032786885246,0.35353535353535354,0.19739952718676124,0.5156482861400895,0.19470538001707943,0.0,1.0
+0.17647058823529413,0.8693467336683417,0.6885245901639344,0.3333333333333333,0.5602836879432624,0.5320417287630403,0.07685738684884713,0.016666666666666666,1.0
+0.058823529411764705,0.48743718592964824,0.5573770491803278,0.21212121212121213,0.0,0.4053651266766021,0.4342442356959863,0.016666666666666666,0.0
+0.23529411764705882,0.7236180904522613,0.6721311475409836,0.32323232323232326,0.0,0.5737704918032788,0.20324508966695132,0.26666666666666666,1.0
+0.058823529411764705,0.41708542713567837,0.5573770491803278,0.0,0.0,0.27123695976154993,0.233134073441503,0.1,0.0
+0.17647058823529413,0.6482412060301508,0.5245901639344263,0.29292929292929293,0.1359338061465721,0.39344262295081966,0.06020495303159693,0.11666666666666667,1.0
+0.058823529411764705,0.5979899497487438,0.7213114754098361,0.41414141414141414,0.20094562647754138,0.6751117734724292,0.18317677198975235,0.08333333333333333,0.0
+0.11764705882352941,0.4723618090452261,0.5573770491803278,0.18181818181818182,0.08983451536643026,0.3874813710879285,0.2062339880444065,0.0,0.0
+0.0,0.5125628140703518,0.5245901639344263,0.46464646464646464,0.09219858156028368,0.6050670640834576,0.1784799316823228,0.0,0.0
+0.11764705882352941,0.5778894472361809,0.5245901639344263,0.2222222222222222,0.0,0.459016393442623,0.14645602049530315,0.0,0.0
+0.47058823529411764,0.7587939698492462,0.639344262295082,0.32323232323232326,0.24822695035460993,0.639344262295082,0.1870196413321947,0.25,1.0
+0.23529411764705882,0.9246231155778895,0.639344262295082,0.3939393939393939,0.32742316784869974,0.5514157973174367,0.07941929974380871,0.16666666666666666,1.0
+0.0,0.4723618090452261,0.0,0.0,0.0,0.0,0.07600341588385995,0.06666666666666667,0.0
+0.058823529411764705,0.9095477386934674,0.5245901639344263,0.30303030303030304,0.2127659574468085,0.5081967213114754,0.1067463706233988,0.2833333333333333,1.0
+0.0,0.678391959798995,0.7704918032786885,0.46464646464646464,0.17139479905437352,0.6050670640834576,0.08795900939368059,0.08333333333333333,0.0
+0.058823529411764705,0.47738693467336685,0.6721311475409836,0.25252525252525254,0.2127659574468085,0.5216095380029807,0.06618274978650726,0.36666666666666664,1.0
+0.11764705882352941,0.49748743718592964,0.0,0.0,0.0,0.330849478390462,0.012809564474807855,0.03333333333333333,0.0
+0.17647058823529413,0.4472361809045226,0.6065573770491803,0.16161616161616163,0.10047281323877069,0.45305514157973176,0.20196413321947054,0.2833333333333333,0.0
+0.058823529411764705,0.4020100502512563,0.6065573770491803,0.1111111111111111,0.07092198581560284,0.44709388971684055,0.19171648163962424,0.016666666666666666,0.0
+0.11764705882352941,0.6984924623115578,0.6147540983606558,0.0,0.0,0.3815201192250373,0.038001707941929974,0.13333333333333333,0.0
+0.058823529411764705,0.45226130653266333,0.5573770491803278,0.08080808080808081,0.0,0.3651266766020865,0.45260461144321085,0.25,0.0
+0.0,0.7085427135678392,0.0,0.0,0.0,0.631892697466468,0.05422715627668659,0.13333333333333333,1.0
+0.7058823529411765,0.7035175879396985,0.6967213114754098,0.3333333333333333,0.0,0.5573770491803279,0.0708795900939368,0.3333333333333333,0.0
+0.29411764705882354,0.7386934673366834,0.6147540983606558,0.0,0.0,0.4456035767511177,0.1520068317677199,0.11666666666666667,0.0
+0.058823529411764705,0.48743718592964824,0.5737704918032787,0.15151515151515152,0.0,0.27123695976154993,0.029461998292058065,0.0,0.0
+0.35294117647058826,0.5376884422110553,0.7213114754098361,0.0,0.0,0.5484351713859911,0.2771135781383433,0.16666666666666666,0.0
+0.0,0.949748743718593,0.8524590163934426,0.25252525252525254,0.0,0.511177347242921,0.1524338172502135,0.3333333333333333,1.0
+0.11764705882352941,0.41708542713567837,0.5409836065573771,0.23232323232323232,0.0591016548463357,0.4798807749627423,0.17890691716481638,0.016666666666666666,0.0
+0.23529411764705882,0.5879396984924623,0.5245901639344263,0.2727272727272727,0.14184397163120568,0.4947839046199703,0.06490179333902649,0.05,0.0
+0.47058823529411764,0.542713567839196,0.5737704918032787,0.0,0.0,0.4545454545454546,0.374466268146883,0.2,1.0
+0.23529411764705882,0.5879396984924623,0.5081967213114754,0.12121212121212122,0.0,0.4426229508196722,0.12894961571306574,0.15,1.0
+0.0,0.9045226130653267,0.639344262295082,0.6363636363636364,0.016548463356973995,0.8852459016393444,1.0,0.06666666666666667,1.0
+0.058823529411764705,0.5025125628140703,0.5901639344262295,0.12121212121212122,0.08274231678486997,0.3770491803278689,0.24765157984628525,0.11666666666666667,0.0
+0.0,0.47738693467336685,0.6557377049180327,0.45454545454545453,0.10874704491725769,0.5439642324888228,0.107600341588386,0.08333333333333333,0.0
+0.0,0.5226130653266332,0.5245901639344263,0.37373737373737376,0.07565011820330969,0.5007451564828614,0.18445772843723313,0.016666666666666666,1.0
+0.0,0.6030150753768844,0.6065573770491803,0.18181818181818182,0.07446808510638298,0.4545454545454546,0.08838599487617418,0.08333333333333333,0.0
+0.058823529411764705,0.4120603015075377,0.5245901639344263,0.13131313131313133,0.11229314420803782,0.315946348733234,0.14389410760034158,0.03333333333333333,0.0
+0.11764705882352941,0.6733668341708543,0.5737704918032787,0.0,0.0,0.4307004470938897,0.1981212638770282,0.03333333333333333,1.0
+0.0,0.457286432160804,0.5573770491803278,0.32323232323232326,0.24822695035460993,0.5946348733233979,0.12937660119555935,0.06666666666666667,0.0
+0.11764705882352941,0.5979899497487438,0.0,0.0,0.0,0.2921013412816692,0.3219470538001708,0.85,0.0
+0.11764705882352941,0.5025125628140703,0.4426229508196721,0.2828282828282828,0.12411347517730496,0.5633383010432191,0.17933390264730997,0.05,0.0
+0.8235294117647058,0.8793969849246231,0.5081967213114754,0.30303030303030304,0.0,0.5007451564828614,0.057216054654141764,0.2833333333333333,1.0
+0.058823529411764705,0.678391959798995,0.4426229508196721,0.0,0.0,0.3979135618479881,0.26003415883859954,0.6833333333333333,0.0
+0.29411764705882354,0.4321608040201005,0.5573770491803278,0.2828282828282828,0.08392434988179669,0.45007451564828616,0.12211784799316822,0.05,0.0
+0.5882352941176471,0.7437185929648241,0.6885245901639344,0.48484848484848486,0.2801418439716312,0.5603576751117736,0.3941076003415883,0.5,1.0
+0.5294117647058824,0.6733668341708543,0.6065573770491803,0.3333333333333333,0.07092198581560284,0.3859910581222057,0.16310845431255336,1.0,0.0
+0.5294117647058824,0.6030150753768844,0.5901639344262295,0.2222222222222222,0.06619385342789598,0.3099850968703428,0.27967549103330486,0.45,0.0
+0.058823529411764705,0.35678391959798994,0.5081967213114754,0.0,0.0,0.3248882265275708,0.14432109308283517,0.08333333333333333,0.0
+0.47058823529411764,0.37185929648241206,0.5737704918032787,0.40404040404040403,0.057919621749408984,0.526080476900149,0.2677198975234842,0.3,0.0
+0.29411764705882354,0.44221105527638194,0.639344262295082,0.30303030303030304,0.0,0.41132637853949333,0.07685738684884713,0.26666666666666666,0.0
+0.5882352941176471,0.5778894472361809,0.8032786885245902,0.0,0.0,0.35767511177347244,0.4030742954739539,0.21666666666666667,0.0
+0.0,0.6231155778894473,0.45901639344262296,0.13131313131313133,0.12411347517730496,0.3248882265275708,0.1596925704526046,0.0,0.0
+0.0,0.37185929648241206,0.4262295081967213,0.10101010101010101,0.0425531914893617,0.41430700447093893,0.08155422715627668,0.016666666666666666,0.0
+0.0,0.48743718592964824,0.5245901639344263,0.36363636363636365,0.1182033096926714,0.5484351713859911,0.2228864218616567,0.06666666666666667,0.0
+0.47058823529411764,0.6030150753768844,0.0,0.0,0.0,0.44709388971684055,0.04483347566182749,0.2833333333333333,1.0
+0.35294117647058826,0.7738693467336684,0.639344262295082,0.41414141414141414,0.16548463356973994,0.6870342771982118,0.2105038428693424,0.1,0.0
+0.058823529411764705,0.7236180904522613,0.6721311475409836,0.40404040404040403,0.0,0.6154992548435172,0.22587532023911186,0.11666666666666667,0.0
+0.0,0.6884422110552764,0.5737704918032787,0.3838383838383838,0.0,0.4947839046199703,0.03928266438941076,0.016666666666666666,0.0
+0.0,0.5979899497487438,0.5409836065573771,0.2727272727272727,0.0,0.5782414307004471,0.07728437233134072,0.016666666666666666,0.0
+0.4117647058823529,0.6834170854271356,0.7377049180327869,0.0,0.0,0.4456035767511177,0.05636208368915457,0.48333333333333334,0.0
+0.23529411764705882,0.5728643216080402,0.5245901639344263,0.0,0.0,0.4307004470938897,0.02049530315969257,0.05,0.0
+0.0,0.6884422110552764,0.6885245901639344,0.2727272727272727,0.0,0.40685543964232496,0.06532877882152008,0.6333333333333333,0.0
+0.11764705882352941,0.5276381909547738,0.6557377049180327,0.45454545454545453,0.22576832151300236,0.5022354694485843,0.2702818104184458,0.13333333333333333,1.0
+0.4117647058823529,0.5728643216080402,0.6229508196721312,0.1717171717171717,0.13002364066193853,0.35469448584202684,0.16567036720751493,0.16666666666666666,0.0
+0.47058823529411764,0.6331658291457286,0.6065573770491803,0.3838383838383838,0.08865248226950355,0.3859910581222057,0.035866780529461996,0.3,0.0
+0.23529411764705882,0.6633165829145728,0.7049180327868853,0.31313131313131315,0.0,0.41728763040238454,0.14560204953031594,0.7,0.0
+0.17647058823529413,0.7939698492462312,0.5737704918032787,0.30303030303030304,0.3877068557919622,0.5290611028315947,0.1135781383432963,0.23333333333333334,1.0
+0.0,0.6180904522613065,0.7213114754098361,0.37373737373737376,0.0,0.5245901639344264,0.05081127241673783,0.13333333333333333,0.0
+0.23529411764705882,0.4271356783919598,0.47540983606557374,0.2222222222222222,0.057919621749408984,0.41430700447093893,0.0973526900085397,0.11666666666666667,0.0
+0.0,0.4221105527638191,0.6721311475409836,0.31313131313131315,0.14775413711583923,0.5692995529061103,0.06618274978650726,0.03333333333333333,0.0
+0.0,0.7286432160804021,0.0,0.0,0.0,0.6587183308494785,0.23569598633646457,0.16666666666666666,1.0
+0.0,0.678391959798995,0.5573770491803278,0.42424242424242425,0.29550827423167847,0.6304023845007451,0.12254483347566181,0.05,1.0
+0.058823529411764705,0.6984924623115578,0.5081967213114754,0.41414141414141414,0.5673758865248227,0.6065573770491804,0.19555935098206662,0.0,0.0
+0.0,0.8693467336683417,0.639344262295082,0.32323232323232326,0.3132387706855792,0.6929955290611028,0.4615713065755764,0.6166666666666667,0.0
+0.23529411764705882,0.49748743718592964,0.5901639344262295,0.1717171717171717,0.0,0.3815201192250373,0.09222886421861655,0.11666666666666667,0.0
+0.47058823529411764,0.9748743718592965,0.6557377049180327,0.0,0.0,0.38897168405365135,0.20196413321947054,0.7666666666666667,0.0
+0.11764705882352941,0.41708542713567837,0.5327868852459017,0.2828282828282828,0.07801418439716312,0.5484351713859911,0.23526900085397098,0.05,0.0
+0.11764705882352941,0.4472361809045226,0.7377049180327869,0.30303030303030304,0.0,0.49925484351713867,0.09137489325362937,0.35,0.0
+0.23529411764705882,0.49748743718592964,0.5573770491803278,0.3838383838383838,0.0,0.488822652757079,0.028608027327070875,0.2,0.0
+0.23529411764705882,0.628140703517588,0.5737704918032787,0.18181818181818182,0.14420803782505912,0.4307004470938897,0.4551665243381724,0.4,1.0
+0.17647058823529413,0.4020100502512563,0.0,0.0,0.0,0.0,0.040990606319385135,0.016666666666666666,0.0
+0.35294117647058826,0.8341708542713567,0.6065573770491803,0.0,0.0,0.3964232488822653,0.0964987190435525,0.75,0.0
+0.29411764705882354,0.5527638190954773,0.5573770491803278,0.0,0.0,0.3874813710879285,0.09137489325362937,0.15,0.0
+0.11764705882352941,0.40703517587939697,0.5901639344262295,0.15151515151515152,0.08983451536643026,0.4485842026825634,0.20025619128949615,0.06666666666666667,0.0
+0.4117647058823529,0.9798994974874372,0.5737704918032787,0.3333333333333333,0.17139479905437352,0.3740685543964233,0.036293766011955594,0.5666666666666667,1.0
+0.35294117647058826,0.7738693467336684,0.6065573770491803,0.32323232323232326,0.2281323877068558,0.436661698956781,0.32493595217762594,0.3,0.0
+0.11764705882352941,0.5879396984924623,0.7377049180327869,0.1919191919191919,0.08392434988179669,0.37555886736214605,0.10034158838599487,0.0,0.0
+0.17647058823529413,0.4221105527638191,0.5901639344262295,0.32323232323232326,0.0,0.5543964232488824,0.0807002561912895,0.11666666666666667,0.0
+0.35294117647058826,0.0,0.5573770491803278,0.41414141414141414,0.0,0.5812220566318927,0.2771135781383433,0.3333333333333333,1.0
+0.4117647058823529,0.4723618090452261,0.5245901639344263,0.25252525252525254,0.0933806146572104,0.496274217585693,0.28181041844577287,0.3333333333333333,0.0
+0.17647058823529413,0.4824120603015075,0.639344262295082,0.3939393939393939,0.0,0.555886736214605,0.06831767719897522,0.31666666666666665,0.0
+0.5882352941176471,0.3768844221105528,0.6721311475409836,0.0,0.0,0.496274217585693,0.07899231426131512,0.2833333333333333,0.0
+0.0,0.9045226130653267,0.7377049180327869,0.26262626262626265,0.10638297872340426,0.5439642324888228,0.10076857386848846,0.23333333333333334,1.0
+0.058823529411764705,0.6532663316582915,0.4918032786885246,0.23232323232323232,0.20094562647754138,0.4262295081967214,0.26216908625106744,0.0,0.0
+0.11764705882352941,0.4221105527638191,0.4098360655737705,0.23232323232323232,0.08983451536643026,0.45305514157973176,0.38001707941929974,0.0,0.0
+0.47058823529411764,0.6030150753768844,0.639344262295082,0.0,0.0,0.37257824143070045,0.14133219470538,0.7166666666666667,0.0
+0.7058823529411765,0.4221105527638191,0.5901639344262295,0.31313131313131315,0.0,0.4426229508196722,0.09350982066609734,0.4166666666666667,1.0
+0.0,0.6984924623115578,0.5081967213114754,0.1717171717171717,0.24822695035460993,0.32935916542473925,0.055081127241673786,0.0,0.0
+0.5294117647058824,0.457286432160804,0.5573770491803278,0.0,0.0,0.36065573770491804,0.05209222886421862,0.6166666666666667,0.0
+0.11764705882352941,0.457286432160804,0.5081967213114754,0.0,0.0,0.40685543964232496,0.19086251067463705,0.016666666666666666,0.0
+0.17647058823529413,0.49748743718592964,0.4426229508196721,0.1919191919191919,0.1016548463356974,0.3815201192250373,0.032450896669513236,0.05,0.0
+0.17647058823529413,0.8190954773869347,0.5737704918032787,0.18181818181818182,0.12411347517730496,0.4709388971684054,0.08112724167378309,0.11666666666666667,1.0
+0.5294117647058824,0.7286432160804021,0.7213114754098361,0.3434343434343434,0.1950354609929078,0.451564828614009,0.2959009393680615,0.5333333333333333,1.0
+0.4117647058823529,0.628140703517588,0.7049180327868853,0.0,0.0,0.5603576751117736,0.0964987190435525,0.5,0.0
+0.7647058823529411,0.38190954773869346,0.4918032786885246,0.0,0.0,0.488822652757079,0.043552519214346705,0.3333333333333333,0.0
+0.35294117647058826,0.6482412060301508,0.7377049180327869,0.0707070707070707,0.38534278959810875,0.2921013412816692,0.215200683176772,0.65,0.0
+0.11764705882352941,0.3417085427135678,0.5737704918032787,0.32323232323232326,0.07801418439716312,0.37257824143070045,0.04654141759180188,0.06666666666666667,0.0
+0.17647058823529413,0.6231155778894473,0.6557377049180327,0.3333333333333333,0.1536643026004728,0.4947839046199703,0.0969257045260461,0.08333333333333333,0.0
+0.35294117647058826,0.5728643216080402,0.0,0.0,0.0,0.0,0.04739538855678907,0.08333333333333333,0.0
+0.5294117647058824,0.6532663316582915,0.5737704918032787,0.0,0.0,0.5096870342771983,0.24508966695132367,0.4,1.0
+0.17647058823529413,0.628140703517588,0.47540983606557374,0.0,0.0,0.4709388971684054,0.031169940222032448,0.05,0.0
+0.17647058823529413,0.4371859296482412,0.4918032786885246,0.18181818181818182,0.0,0.3248882265275708,0.15627668659265584,0.0,0.0
+0.058823529411764705,0.48743718592964824,0.5245901639344263,0.1919191919191919,0.09692671394799054,0.27123695976154993,0.09436379163108453,0.0,0.0
+0.17647058823529413,0.5829145728643216,0.6065573770491803,0.15151515151515152,0.12411347517730496,0.3919523099850969,0.01238257899231426,0.05,0.0
+0.0,0.5879396984924623,0.5409836065573771,0.31313131313131315,0.2222222222222222,0.459016393442623,0.177198975234842,0.016666666666666666,0.0
+0.0,0.5577889447236181,0.5327868852459017,0.0,0.0,0.3666169895678093,0.24850555081127243,0.16666666666666666,0.0
+0.11764705882352941,0.6130653266331658,0.4918032786885246,0.18181818181818182,0.12529550827423167,0.444113263785395,0.27284372331340734,0.016666666666666666,0.0
+0.0,0.5376884422110553,0.6229508196721312,0.0,0.0,0.6751117734724292,0.25960717335610595,0.05,0.0
+0.058823529411764705,0.4321608040201005,0.5409836065573771,0.5252525252525253,0.0768321513002364,0.6154992548435172,0.3582408198121264,0.13333333333333333,0.0
+0.35294117647058826,0.457286432160804,0.0,0.0,0.0,0.444113263785395,0.18061485909479078,0.16666666666666666,0.0
+0.058823529411764705,0.3869346733668342,0.45901639344262296,0.30303030303030304,0.06619385342789598,0.496274217585693,0.5008539709649871,0.05,0.0
+0.23529411764705882,0.6633165829145728,0.0,0.0,0.0,0.4903129657228018,0.09564474807856531,0.03333333333333333,1.0
+0.0,0.5276381909547738,0.7377049180327869,0.0,0.0,0.4411326378539494,0.05081127241673783,0.4166666666666667,0.0
+0.0,0.2864321608040201,0.4918032786885246,0.0,0.0,0.323397913561848,0.28052946199829204,0.7666666666666667,0.0
+0.0,0.6381909547738693,0.6557377049180327,0.37373737373737376,0.24822695035460993,0.5409836065573771,0.30999146029035013,0.03333333333333333,0.0
+0.17647058823529413,0.6482412060301508,0.7540983606557377,0.494949494949495,0.18321513002364065,0.5424739195230999,0.38001707941929974,0.18333333333333332,1.0
+0.47058823529411764,0.5025125628140703,0.6065573770491803,0.40404040404040403,0.2541371158392435,0.587183308494784,0.24893253629376602,0.36666666666666664,1.0
+0.17647058823529413,0.6432160804020101,0.5901639344262295,0.25252525252525254,0.22458628841607564,0.4828614008941878,0.20111016225448336,0.1,1.0
+0.5882352941176471,0.45226130653266333,0.6967213114754098,0.32323232323232326,0.0,0.5201192250372578,0.3189581554227156,0.5833333333333334,1.0
+0.23529411764705882,0.4221105527638191,0.7377049180327869,0.23232323232323232,0.06619385342789598,0.5886736214605067,0.034585824081981215,0.06666666666666667,0.0
+0.058823529411764705,0.44221105527638194,0.639344262295082,0.29292929292929293,0.08983451536643026,0.4769001490312966,0.12254483347566181,0.13333333333333333,0.0
+0.47058823529411764,0.9346733668341709,0.7377049180327869,0.35353535353535354,0.26595744680851063,0.5141579731743666,0.14730999146029033,0.26666666666666666,1.0
+0.29411764705882354,0.9396984924623115,0.6229508196721312,0.2727272727272727,0.24468085106382978,0.6497764530551416,0.40819812126387706,0.5333333333333333,1.0
+0.23529411764705882,0.6582914572864321,0.5573770491803278,0.21212121212121213,0.19621749408983452,0.49329359165424747,0.035012809564474806,0.11666666666666667,0.0
+0.058823529411764705,0.8241206030150754,0.6721311475409836,0.43434343434343436,0.07919621749408984,0.488822652757079,0.11229718189581554,0.48333333333333334,0.0
+0.23529411764705882,0.949748743718593,0.9016393442622951,0.31313131313131315,0.0,0.42473919523099857,0.25704526046114434,0.26666666666666666,0.0
+0.058823529411764705,0.5829145728643216,0.5737704918032787,0.2828282828282828,0.0,0.40834575260804773,0.053800170794193,0.0,0.0
+0.17647058823529413,0.4221105527638191,0.5573770491803278,0.30303030303030304,0.12529550827423167,0.4754098360655738,0.21904355251921434,0.06666666666666667,0.0
+0.35294117647058826,0.5728643216080402,0.7213114754098361,0.0,0.0,0.41430700447093893,0.07216054654141758,0.75,0.0
+0.058823529411764705,0.44221105527638194,0.5081967213114754,0.24242424242424243,0.05200945626477541,0.4456035767511177,0.14688300597779674,0.03333333333333333,0.0
+0.058823529411764705,0.4221105527638191,0.5245901639344263,0.23232323232323232,0.1359338061465721,0.5499254843517138,0.1678052946199829,0.11666666666666667,0.0
+0.4117647058823529,0.6231155778894473,0.5737704918032787,0.3333333333333333,0.2541371158392435,0.3800298062593145,0.035439795046968404,0.26666666666666666,0.0
+0.058823529411764705,0.48743718592964824,0.5737704918032787,0.40404040404040403,0.0,0.5678092399403876,0.059777967549103334,0.15,0.0
+0.47058823529411764,0.5527638190954773,0.6229508196721312,0.0,0.0,0.41430700447093893,0.06789069171648163,0.6166666666666667,0.0
+0.6470588235294118,0.5175879396984925,0.5573770491803278,0.40404040404040403,0.0,0.6885245901639345,0.02049530315969257,0.35,0.0
+0.6470588235294118,0.4271356783919598,0.6065573770491803,0.0,0.0,0.4485842026825634,0.09479077711357813,0.23333333333333334,0.0
+0.35294117647058826,0.628140703517588,0.6229508196721312,0.0,0.0,0.503725782414307,0.018360375747224593,0.55,1.0
+0.0,0.9949748743718593,0.5409836065573771,0.32323232323232326,0.32387706855791965,0.6154992548435172,0.18104184457728437,0.11666666666666667,1.0
+0.058823529411764705,0.4371859296482412,0.5573770491803278,0.3434343434343434,0.09101654846335698,0.5603576751117736,0.13791631084543127,0.05,0.0
+0.35294117647058826,0.49748743718592964,0.4918032786885246,0.1919191919191919,0.06382978723404255,0.4008941877794337,0.17890691716481638,0.18333333333333332,0.0
+0.0,0.457286432160804,0.6557377049180327,0.0,0.0,0.4828614008941878,0.2233134073441503,0.1,0.0
+0.11764705882352941,0.47738693467336685,0.4426229508196721,0.1414141414141414,0.10401891252955082,0.38897168405365135,0.2860802732707088,0.016666666666666666,0.0
+0.058823529411764705,0.49748743718592964,0.5901639344262295,0.30303030303030304,0.02127659574468085,0.5752608047690015,0.14261315115286077,0.0,0.0
+0.35294117647058826,0.4623115577889447,0.5081967213114754,0.32323232323232326,0.14893617021276595,0.4769001490312966,0.002988898377455169,0.4166666666666667,0.0
+0.23529411764705882,0.7738693467336684,0.5901639344262295,0.29292929292929293,0.14893617021276595,0.466467958271237,0.11101622544833475,0.26666666666666666,0.0
+0.0,0.6080402010050251,0.5409836065573771,0.30303030303030304,0.1950354609929078,0.511177347242921,0.0533731853116994,0.2,1.0
+0.17647058823529413,0.39195979899497485,0.5737704918032787,0.0,0.0,0.4843517138599106,0.08198121263877028,0.3,0.0
+0.11764705882352941,0.6532663316582915,0.7868852459016393,0.0,0.0,0.33681073025335323,0.08112724167378309,0.0,0.0
+0.17647058823529413,0.5577889447236181,0.47540983606557374,0.31313131313131315,0.05200945626477541,0.4396423248882266,0.1502988898377455,0.016666666666666666,0.0
+0.11764705882352941,0.49246231155778897,0.4918032786885246,0.1717171717171717,0.14184397163120568,0.5171385991058123,0.05123825789923143,0.016666666666666666,0.0
+0.058823529411764705,0.7185929648241206,0.7049180327868853,0.30303030303030304,0.3900709219858156,0.4485842026825634,0.3475661827497865,0.03333333333333333,0.0
+0.058823529411764705,0.5979899497487438,0.36065573770491804,0.47474747474747475,0.07446808510638298,0.5290611028315947,0.08625106746370624,0.06666666666666667,0.0
+0.35294117647058826,0.542713567839196,0.36065573770491804,0.20202020202020202,0.1536643026004728,0.35767511177347244,0.31383432963279245,0.23333333333333334,0.0
+0.11764705882352941,0.592964824120603,0.6557377049180327,0.0,0.0,0.639344262295082,0.26259607173356103,0.0,1.0
+0.5882352941176471,0.6683417085427136,0.5573770491803278,0.0,0.0,0.4023845007451565,0.07130657557643039,0.25,0.0
+0.11764705882352941,0.9899497487437185,0.5737704918032787,1.0,0.0,0.5171385991058123,0.2122117847993168,0.6833333333333333,1.0
+0.0,0.7587939698492462,0.7377049180327869,0.46464646464646464,0.0,0.6274217585692996,0.12510674637062338,0.0,1.0
+0.35294117647058826,0.5477386934673367,0.4918032786885246,0.2727272727272727,0.0,0.37257824143070045,0.05465414175918019,0.1,0.0
+0.7058823529411765,0.6080402010050251,0.639344262295082,0.1717171717171717,0.0,0.3949329359165425,0.07728437233134072,0.6833333333333333,0.0
+0.47058823529411764,0.5025125628140703,0.6229508196721312,0.0,0.0,0.5767511177347244,0.04782237403928266,0.35,0.0
+0.47058823529411764,0.6231155778894473,0.6229508196721312,0.24242424242424243,0.7092198581560284,0.4277198211624441,0.26003415883859954,0.5166666666666667,1.0
+0.058823529411764705,0.46733668341708545,0.45901639344262296,0.1111111111111111,0.0,0.33532041728763046,0.14474807856532876,0.016666666666666666,0.0
+0.47058823529411764,0.7185929648241206,0.5409836065573771,0.0,0.0,0.5201192250372578,0.021776259607173356,0.3333333333333333,1.0
+0.35294117647058826,0.5175879396984925,0.5409836065573771,0.0,0.0,0.3621460506706409,0.07301451750640478,0.13333333333333333,0.0
+0.17647058823529413,0.8844221105527639,0.7049180327868853,0.2727272727272727,0.18439716312056736,0.496274217585693,0.45943637916310837,0.5166666666666667,1.0
+0.0,0.36683417085427134,0.0,0.0,0.0,0.31445603576751124,0.11272416737830913,0.06666666666666667,0.0
+0.6470588235294118,0.5577889447236181,0.6885245901639344,0.40404040404040403,0.0,0.6974664679582713,0.3616567036720752,0.4,1.0
+0.11764705882352941,0.5628140703517588,0.639344262295082,0.5050505050505051,0.16548463356973994,0.587183308494784,0.04141759180187873,0.05,0.0
+0.17647058823529413,0.6633165829145728,0.6557377049180327,0.0,0.0,0.5126676602086438,0.13834329632792486,0.38333333333333336,1.0
+0.11764705882352941,0.4120603015075377,0.4262295081967213,0.2222222222222222,0.1359338061465721,0.42473919523099857,0.6921434671221178,0.06666666666666667,0.0
+0.35294117647058826,0.6180904522613065,0.5901639344262295,0.45454545454545453,0.2718676122931442,0.5007451564828614,0.27967549103330486,0.21666666666666667,0.0
+0.0,0.9447236180904522,0.6721311475409836,0.1414141414141414,0.2186761229314421,0.4769001490312966,0.2578992314261315,0.016666666666666666,1.0
+0.0,0.33668341708542715,0.6229508196721312,0.0,0.0,0.6751117734724292,0.04953031596925705,0.4166666666666667,0.0
+0.058823529411764705,0.4472361809045226,0.19672131147540983,0.1919191919191919,0.02955082742316785,0.41430700447093893,0.2053800170794193,0.0,0.0
+0.058823529411764705,0.8693467336683417,0.6065573770491803,0.0,0.0,0.5484351713859911,0.00426985482493595,0.2833333333333333,1.0
+0.058823529411764705,0.5477386934673367,0.3114754098360656,0.18181818181818182,0.14184397163120568,0.34426229508196726,0.14047822374039282,0.08333333333333333,0.0
+0.058823529411764705,0.542713567839196,0.7213114754098361,0.1919191919191919,0.0,0.40387481371087935,0.13748932536293765,0.05,0.0
+0.35294117647058826,0.4824120603015075,0.0,0.0,0.0,0.35320417287630407,0.04782237403928266,0.11666666666666667,0.0
+0.058823529411764705,0.6231155778894473,0.6065573770491803,0.36363636363636365,0.0,0.41430700447093893,0.009393680614859097,0.15,0.0
+0.4117647058823529,0.7537688442211056,0.639344262295082,0.29292929292929293,0.14893617021276595,0.5245901639344264,0.26216908625106744,0.55,1.0
+0.23529411764705882,0.9195979899497487,0.0,0.0,0.0,0.42324888226527574,0.057216054654141764,0.25,1.0
+0.058823529411764705,0.6231155778894473,0.4918032786885246,0.32323232323232326,0.0,0.533532041728763,0.18616567036720752,0.0,0.0
+0.058823529411764705,0.9095477386934674,0.639344262295082,0.42424242424242425,0.3463356973995272,0.5961251862891208,0.5038428693424423,0.016666666666666666,1.0
+0.058823529411764705,0.4623115577889447,0.5081967213114754,0.25252525252525254,0.04846335697399527,0.2906110283159464,0.17250213492741245,0.06666666666666667,0.0
+0.0,0.7638190954773869,0.6721311475409836,0.3939393939393939,0.3215130023640662,0.6184798807749627,0.08198121263877028,0.1,0.0
+0.058823529411764705,0.5577889447236181,0.5081967213114754,0.13131313131313133,0.21513002364066194,0.35767511177347244,0.025619128949615717,0.03333333333333333,0.0
+0.17647058823529413,0.5326633165829145,0.4426229508196721,0.21212121212121213,0.1867612293144208,0.4605067064083458,0.09137489325362937,0.05,0.0
+0.17647058823529413,0.8743718592964824,0.47540983606557374,0.2222222222222222,0.2293144208037825,0.4903129657228018,0.21989752348420152,0.25,1.0
+0.4117647058823529,0.8442211055276382,0.7213114754098361,0.42424242424242425,0.37943262411347517,0.5692995529061103,0.302732707087959,0.31666666666666665,1.0
+0.35294117647058826,0.5276381909547738,0.6557377049180327,0.2828282828282828,0.0,0.4843517138599106,0.3415883859948762,0.08333333333333333,0.0
+0.6470588235294118,0.6934673366834171,0.6065573770491803,0.26262626262626265,0.1702127659574468,0.5380029806259315,0.20452604611443212,0.48333333333333334,1.0
+0.17647058823529413,0.5326633165829145,0.5901639344262295,0.0,0.0,0.3845007451564829,0.055081127241673786,0.1,0.0
+0.35294117647058826,0.5879396984924623,0.7868852459016393,0.0,0.0,0.4277198211624441,0.033731853116994025,0.15,0.0
+0.11764705882352941,0.3417085427135678,0.5081967213114754,0.13131313131313133,0.01773049645390071,0.2995529061102832,0.07643040136635354,0.03333333333333333,0.0
+0.5294117647058824,0.5628140703517588,0.6721311475409836,0.24242424242424243,0.0,0.42026825633383014,0.5140905209222886,0.48333333333333334,1.0
+0.0,0.5979899497487438,0.0,0.0,0.0,0.4828614008941878,0.026900085397096492,0.05,1.0
+0.11764705882352941,0.5628140703517588,0.7049180327868853,0.42424242424242425,0.18912529550827423,0.5722801788375559,0.07173356105892399,0.11666666666666667,0.0
+0.11764705882352941,0.4623115577889447,0.6229508196721312,0.20202020202020202,0.0,0.36065573770491804,0.6917164816396242,0.11666666666666667,0.0
+0.35294117647058826,0.9195979899497487,0.7704918032786885,0.0,0.0,0.6080476900149031,0.5905209222886422,0.4,0.0
+0.0,0.4723618090452261,0.5737704918032787,0.2727272727272727,0.1359338061465721,0.6482861400894189,0.11485909479077709,0.0,0.0
+0.11764705882352941,0.542713567839196,0.5245901639344263,0.0,0.0,0.459016393442623,0.034158838599487616,0.0,0.0
+0.23529411764705882,0.45226130653266333,0.7213114754098361,0.47474747474747475,0.06382978723404255,0.5618479880774964,0.12126387702818103,0.13333333333333333,0.0
+0.0,0.628140703517588,0.5573770491803278,0.0,0.0,0.3681073025335321,0.05465414175918019,0.0,0.0
+0.0,0.6633165829145728,0.639344262295082,0.0,0.0,0.4828614008941878,0.13450042698548248,0.0,0.0
+0.29411764705882354,0.6432160804020101,0.6557377049180327,0.0,0.0,0.5156482861400895,0.02818104184457728,0.4,0.0
+0.23529411764705882,0.4723618090452261,0.5327868852459017,0.2222222222222222,0.0,0.3681073025335321,0.02988898377455166,0.0,0.0
+0.4117647058823529,0.5728643216080402,0.5245901639344263,0.0,0.0,0.40834575260804773,0.27924850555081127,0.21666666666666667,1.0
+0.0,0.5125628140703518,0.639344262295082,0.40404040404040403,0.10638297872340426,0.5141579731743666,0.06831767719897522,0.05,0.0
+0.11764705882352941,0.5577889447236181,0.4918032786885246,0.0,0.0,0.3904619970193741,0.11315115286080274,0.03333333333333333,0.0
+0.058823529411764705,0.6432160804020101,0.6721311475409836,0.1717171717171717,0.21631205673758866,0.4098360655737705,0.015798462852263023,0.016666666666666666,0.0
+0.5882352941176471,0.4623115577889447,0.5081967213114754,0.0,0.0,0.3859910581222057,0.038001707941929974,0.16666666666666666,0.0
+0.7647058823529411,0.5226130653266332,0.5901639344262295,0.0,0.0,0.46497764530551416,0.16524338172502134,0.2833333333333333,1.0
+0.29411764705882354,0.5226130653266332,0.6065573770491803,0.0,0.0,0.42921013412816694,0.03202391118701964,0.45,0.0
+0.11764705882352941,0.4723618090452261,0.6229508196721312,0.18181818181818182,0.07801418439716312,0.4709388971684054,0.2438087105038429,0.03333333333333333,0.0
+0.4117647058823529,0.48743718592964824,0.6229508196721312,0.32323232323232326,0.10756501182033097,0.609538002980626,0.33859948761742104,0.18333333333333332,1.0
+0.058823529411764705,0.5025125628140703,0.6065573770491803,0.12121212121212122,0.054373522458628844,0.2906110283159464,0.030315969257045258,0.11666666666666667,0.0
+0.0,0.5125628140703518,0.7049180327868853,0.1717171717171717,0.12411347517730496,0.436661698956781,0.2634500426985482,0.1,0.0
+0.23529411764705882,0.6432160804020101,0.5737704918032787,0.0,0.0,0.511177347242921,0.09607173356105891,0.05,0.0
+0.35294117647058826,0.7386934673366834,0.6557377049180327,0.0,0.0,0.4396423248882266,0.042698548249359515,0.48333333333333334,1.0
+0.23529411764705882,0.45226130653266333,0.0,0.0,0.0,0.41728763040238454,0.22715627668659266,0.16666666666666666,0.0
+0.17647058823529413,0.5175879396984925,0.5901639344262295,0.30303030303030304,0.17966903073286053,0.41132637853949333,0.2783945345858241,0.1,0.0
+0.11764705882352941,0.7889447236180904,0.6065573770491803,0.35353535353535354,0.5200945626477541,0.587183308494784,0.023911187019641334,0.15,0.0
+0.058823529411764705,0.8391959798994975,0.6065573770491803,0.1717171717171717,0.1702127659574468,0.34873323397913564,0.15755764304013664,0.2,1.0
+0.0,0.8994974874371859,0.4098360655737705,0.36363636363636365,0.1879432624113475,0.5633383010432191,0.1609735269000854,0.016666666666666666,1.0
+0.6470588235294118,0.6834170854271356,0.6885245901639344,0.35353535353535354,0.1536643026004728,0.42175856929955297,0.07771135781383433,0.35,1.0
+0.0,0.5376884422110553,0.4918032786885246,0.25252525252525254,0.0,0.39344262295081966,0.02348420153714774,0.03333333333333333,0.0
+0.058823529411764705,0.457286432160804,0.4426229508196721,0.25252525252525254,0.1182033096926714,0.37555886736214605,0.06660973526900087,0.03333333333333333,0.0
+0.058823529411764705,0.5879396984924623,0.4918032786885246,0.23232323232323232,0.12529550827423167,0.503725782414307,0.16567036720751493,0.1,0.0
+0.29411764705882354,0.6180904522613065,0.6065573770491803,0.40404040404040403,0.09101654846335698,0.5081967213114754,0.08155422715627668,0.11666666666666667,0.0
+0.11764705882352941,0.6030150753768844,0.4426229508196721,0.0,0.0,0.3994038748137109,0.1609735269000854,0.1,0.0
+0.058823529411764705,0.5326633165829145,0.5737704918032787,0.2828282828282828,0.1595744680851064,0.5096870342771983,0.027327070879590087,0.016666666666666666,0.0
+0.11764705882352941,0.7788944723618091,0.4262295081967213,0.2727272727272727,0.6382978723404256,0.5767511177347244,0.06917164816396242,0.06666666666666667,1.0
+0.11764705882352941,0.507537688442211,0.47540983606557374,0.35353535353535354,0.10638297872340426,0.3248882265275708,0.03287788215200683,0.016666666666666666,0.0
+0.058823529411764705,0.6030150753768844,0.6557377049180327,0.48484848484848486,0.2364066193853428,0.57973174366617,0.46285226302305715,0.3333333333333333,0.0
+0.6470588235294118,0.6381909547738693,0.8688524590163934,0.0,0.0,0.5812220566318927,0.04782237403928266,0.5,0.0
+0.17647058823529413,0.4020100502512563,0.6721311475409836,0.31313131313131315,0.08274231678486997,0.5096870342771983,0.5183603757472246,0.1,1.0
+0.5882352941176471,0.8140703517587939,0.6885245901639344,0.0,0.0,0.4128166915052161,0.0444064901793339,0.55,0.0
+0.058823529411764705,1.0,0.6229508196721312,0.43434343434343436,0.0,0.639344262295082,0.5619128949615713,0.016666666666666666,1.0
+0.47058823529411764,0.8391959798994975,0.8688524590163934,0.46464646464646464,0.2730496453900709,0.5603576751117736,0.037147736976942784,0.36666666666666664,1.0
+0.5294117647058824,0.7286432160804021,0.6557377049180327,0.46464646464646464,0.1536643026004728,0.5648286140089419,0.23868488471391974,0.31666666666666665,1.0
+0.35294117647058826,0.5778894472361809,0.4918032786885246,0.3939393939393939,0.0,0.5022354694485843,0.07130657557643039,0.31666666666666665,1.0
+0.058823529411764705,0.5628140703517588,0.6557377049180327,0.45454545454545453,0.15602836879432624,0.518628912071535,0.059350982066609735,0.05,0.0
+0.23529411764705882,0.7286432160804021,0.6721311475409836,0.18181818181818182,0.0,0.4843517138599106,0.06703672075149443,0.8166666666666667,1.0
+0.058823529411764705,0.46733668341708545,0.5737704918032787,0.31313131313131315,0.0,0.45305514157973176,0.10119555935098205,0.03333333333333333,0.0
\ No newline at end of file