HADOOP-11540. Raw Reed-Solomon coder using Intel ISA-L library. Contributed by Kai Zheng
diff --git a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
index a644aa5..b650eae 100644
--- a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
@@ -47,6 +47,19 @@
        <Field name="out" />
        <Bug pattern="IS2_INCONSISTENT_SYNC" />
      </Match>
+    <!--
+       The nativeCoder field is set and read by native code only.
+    -->
+    <Match>
+        <Class name="org.apache.hadoop.io.erasurecode.rawcoder.AbstractNativeRawEncoder" />
+        <Field name="nativeCoder" />
+        <Bug pattern="UUF_UNUSED_FIELD" />
+    </Match>
+    <Match>
+        <Class name="org.apache.hadoop.io.erasurecode.rawcoder.AbstractNativeRawDecoder" />
+        <Field name="nativeCoder" />
+        <Bug pattern="UUF_UNUSED_FIELD" />
+    </Match>
      <!-- 
        Further SaslException should be ignored during cleanup and
        original exception should be re-thrown.
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 398bb84..ef2fdf5 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -631,6 +631,8 @@
                     <javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Compressor</javahClassName>
                     <javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Decompressor</javahClassName>
                     <javahClassName>org.apache.hadoop.io.erasurecode.ErasureCodeNative</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawEncoder</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawDecoder</javahClassName>
                     <javahClassName>org.apache.hadoop.crypto.OpensslCipher</javahClassName>
                     <javahClassName>org.apache.hadoop.crypto.random.OpensslSecureRandom</javahClassName>
                     <javahClassName>org.apache.hadoop.util.NativeCrc32</javahClassName>
@@ -769,6 +771,8 @@
                     <javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Compressor</javahClassName>
                     <javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Decompressor</javahClassName>
                     <javahClassName>org.apache.hadoop.io.erasurecode.ErasureCodeNative</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawEncoder</javahClassName>
+                    <javahClassName>org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawDecoder</javahClassName>
                     <javahClassName>org.apache.hadoop.crypto.OpensslCipher</javahClassName>
                     <javahClassName>org.apache.hadoop.crypto.random.OpensslSecureRandom</javahClassName>
                     <javahClassName>org.apache.hadoop.util.NativeCrc32</javahClassName>
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawDecoder.java
new file mode 100644
index 0000000..55edb64
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawDecoder.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Abstract native raw decoder for all native coders to extend with.
+ */
+@InterfaceAudience.Private
+abstract class AbstractNativeRawDecoder extends RawErasureDecoder {
+  public static final Logger LOG =
+      LoggerFactory.getLogger(AbstractNativeRawDecoder.class);
+
+  public AbstractNativeRawDecoder(ErasureCoderOptions coderOptions) {
+    super(coderOptions);
+  }
+
+  @Override
+  protected void doDecode(ByteBufferDecodingState decodingState) {
+    int[] inputOffsets = new int[decodingState.inputs.length];
+    int[] outputOffsets = new int[decodingState.outputs.length];
+
+    ByteBuffer buffer;
+    for (int i = 0; i < decodingState.inputs.length; ++i) {
+      buffer = decodingState.inputs[i];
+      if (buffer != null) {
+        inputOffsets[i] = buffer.position();
+      }
+    }
+
+    for (int i = 0; i < decodingState.outputs.length; ++i) {
+      buffer = decodingState.outputs[i];
+      outputOffsets[i] = buffer.position();
+    }
+
+    performDecodeImpl(decodingState.inputs, inputOffsets,
+        decodingState.decodeLength, decodingState.erasedIndexes,
+        decodingState.outputs, outputOffsets);
+  }
+
+  protected abstract void performDecodeImpl(ByteBuffer[] inputs,
+                                            int[] inputOffsets, int dataLen,
+                                            int[] erased, ByteBuffer[] outputs,
+                                            int[] outputOffsets);
+
+  @Override
+  protected void doDecode(ByteArrayDecodingState decodingState) {
+    LOG.warn("convertToByteBufferState is invoked, " +
+        "not efficiently. Please use direct ByteBuffer inputs/outputs");
+
+    ByteBufferDecodingState bbdState = decodingState.convertToByteBufferState();
+    doDecode(bbdState);
+
+    for (int i = 0; i < decodingState.outputs.length; i++) {
+      bbdState.outputs[i].get(decodingState.outputs[i],
+          decodingState.outputOffsets[i], decodingState.decodeLength);
+    }
+  }
+
+  // To link with the underlying data structure in the native layer.
+  // No getter/setter; only accessed by native code.
+  private long nativeCoder;
+}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawEncoder.java
new file mode 100644
index 0000000..b2c7016
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawEncoder.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Abstract native raw encoder for all native coders to extend with.
+ */
+@InterfaceAudience.Private
+abstract class AbstractNativeRawEncoder extends RawErasureEncoder {
+  public static final Logger LOG =
+      LoggerFactory.getLogger(AbstractNativeRawEncoder.class);
+
+  public AbstractNativeRawEncoder(ErasureCoderOptions coderOptions) {
+    super(coderOptions);
+  }
+
+  @Override
+  protected void doEncode(ByteBufferEncodingState encodingState) {
+    int[] inputOffsets = new int[encodingState.inputs.length];
+    int[] outputOffsets = new int[encodingState.outputs.length];
+    int dataLen = encodingState.inputs[0].remaining();
+
+    ByteBuffer buffer;
+    for (int i = 0; i < encodingState.inputs.length; ++i) {
+      buffer = encodingState.inputs[i];
+      inputOffsets[i] = buffer.position();
+    }
+
+    for (int i = 0; i < encodingState.outputs.length; ++i) {
+      buffer = encodingState.outputs[i];
+      outputOffsets[i] = buffer.position();
+    }
+
+    performEncodeImpl(encodingState.inputs, inputOffsets, dataLen,
+        encodingState.outputs, outputOffsets);
+  }
+
+  protected abstract void performEncodeImpl(
+          ByteBuffer[] inputs, int[] inputOffsets,
+          int dataLen, ByteBuffer[] outputs, int[] outputOffsets);
+
+  @Override
+  protected void doEncode(ByteArrayEncodingState encodingState) {
+    LOG.warn("convertToByteBufferState is invoked, " +
+        "not efficiently. Please use direct ByteBuffer inputs/outputs");
+
+    ByteBufferEncodingState bbeState = encodingState.convertToByteBufferState();
+    doEncode(bbeState);
+
+    for (int i = 0; i < encodingState.outputs.length; i++) {
+      bbeState.outputs[i].get(encodingState.outputs[i],
+          encodingState.outputOffsets[i], encodingState.encodeLength);
+    }
+  }
+
+  // To link with the underlying data structure in the native layer.
+  // No getter/setter; only accessed by native code.
+  private long nativeCoder;
+}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteArrayDecodingState.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteArrayDecodingState.java
index 69c084d..7b3828b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteArrayDecodingState.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteArrayDecodingState.java
@@ -20,6 +20,8 @@
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 
+import java.nio.ByteBuffer;
+
 /**
  * A utility class that maintains decoding state during a decode call using
  * byte array inputs.
@@ -66,6 +68,27 @@
   }
 
   /**
+   * Convert to a ByteBufferDecodingState when it's backed by on-heap arrays.
+   */
+  ByteBufferDecodingState convertToByteBufferState() {
+    ByteBuffer[] newInputs = new ByteBuffer[inputs.length];
+    ByteBuffer[] newOutputs = new ByteBuffer[outputs.length];
+
+    for (int i = 0; i < inputs.length; i++) {
+      newInputs[i] = CoderUtil.cloneAsDirectByteBuffer(inputs[i],
+          inputOffsets[i], decodeLength);
+    }
+
+    for (int i = 0; i < outputs.length; i++) {
+      newOutputs[i] = ByteBuffer.allocateDirect(decodeLength);
+    }
+
+    ByteBufferDecodingState bbdState = new ByteBufferDecodingState(decoder,
+        decodeLength, erasedIndexes, newInputs, newOutputs);
+    return bbdState;
+  }
+
+  /**
    * Check and ensure the buffers are of the desired length.
    * @param buffers the buffers to check
    */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteArrayEncodingState.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteArrayEncodingState.java
index 9d861d4..518356a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteArrayEncodingState.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteArrayEncodingState.java
@@ -20,6 +20,8 @@
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 
+import java.nio.ByteBuffer;
+
 /**
  * A utility class that maintains encoding state during an encode call using
  * byte array inputs.
@@ -62,6 +64,27 @@
   }
 
   /**
+   * Convert to a ByteBufferEncodingState when it's backed by on-heap arrays.
+   */
+  ByteBufferEncodingState convertToByteBufferState() {
+    ByteBuffer[] newInputs = new ByteBuffer[inputs.length];
+    ByteBuffer[] newOutputs = new ByteBuffer[outputs.length];
+
+    for (int i = 0; i < inputs.length; i++) {
+      newInputs[i] = CoderUtil.cloneAsDirectByteBuffer(inputs[i],
+          inputOffsets[i], encodeLength);
+    }
+
+    for (int i = 0; i < outputs.length; i++) {
+      newOutputs[i] = ByteBuffer.allocateDirect(encodeLength);
+    }
+
+    ByteBufferEncodingState bbeState = new ByteBufferEncodingState(encoder,
+        encodeLength, newInputs, newOutputs);
+    return bbeState;
+  }
+
+  /**
    * Check and ensure the buffers are of the desired length.
    * @param buffers the buffers to check
    */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteBufferDecodingState.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteBufferDecodingState.java
index 5c5b0f6..4970c06 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteBufferDecodingState.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteBufferDecodingState.java
@@ -48,8 +48,20 @@
     checkOutputBuffers(outputs);
   }
 
+  ByteBufferDecodingState(RawErasureDecoder decoder,
+                          int decodeLength,
+                          int[] erasedIndexes,
+                          ByteBuffer[] inputs,
+                          ByteBuffer[] outputs) {
+    this.decoder = decoder;
+    this.decodeLength = decodeLength;
+    this.erasedIndexes = erasedIndexes;
+    this.inputs = inputs;
+    this.outputs = outputs;
+  }
+
   /**
-   * Convert to a ByteArrayEncodingState when it's backed by on-heap arrays.
+   * Convert to a ByteArrayDecodingState when it's backed by on-heap arrays.
    */
   ByteArrayDecodingState convertToByteArrayState() {
     int[] inputOffsets = new int[inputs.length];
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteBufferEncodingState.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteBufferEncodingState.java
index 7a10ac2..7d3124d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteBufferEncodingState.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteBufferEncodingState.java
@@ -46,6 +46,16 @@
     checkBuffers(outputs);
   }
 
+  ByteBufferEncodingState(RawErasureEncoder encoder,
+                          int encodeLength,
+                          ByteBuffer[] inputs,
+                          ByteBuffer[] outputs) {
+    this.encoder = encoder;
+    this.encodeLength = encodeLength;
+    this.inputs = inputs;
+    this.outputs = outputs;
+  }
+
   /**
    * Convert to a ByteArrayEncodingState when it's backed by on-heap arrays.
    */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/CoderUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/CoderUtil.java
index aceb3c6..b22d44f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/CoderUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/CoderUtil.java
@@ -83,8 +83,6 @@
 
   /**
    * Initialize the output buffers with ZERO bytes.
-   * @param buffers
-   * @param dataLen
    */
   static void resetOutputBuffers(ByteBuffer[] buffers, int dataLen) {
     for (ByteBuffer buffer : buffers) {
@@ -94,8 +92,6 @@
 
   /**
    * Initialize the output buffers with ZERO bytes.
-   * @param buffers
-   * @param dataLen
    */
   static void resetOutputBuffers(byte[][] buffers, int[] offsets,
                                  int dataLen) {
@@ -127,10 +123,6 @@
 
   /**
    * Clone an input bytes array as direct ByteBuffer.
-   * @param input
-   * @param len
-   * @param offset
-   * @return direct ByteBuffer
    */
   static ByteBuffer cloneAsDirectByteBuffer(byte[] input, int offset, int len) {
     if (input == null) { // an input can be null, if erased or not to read
@@ -166,10 +158,6 @@
    * @return the first valid input
    */
   static <T> T findFirstValidInput(T[] inputs) {
-    if (inputs.length > 0 && inputs[0] != null) {
-      return inputs[0];
-    }
-
     for (T input : inputs) {
       if (input != null) {
         return input;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawDecoder.java
new file mode 100644
index 0000000..6b497cc
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawDecoder.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+
+import java.nio.ByteBuffer;
+
+/**
+ * A Reed-Solomon raw decoder using Intel ISA-L library.
+ */
+@InterfaceAudience.Private
+public class NativeRSRawDecoder extends AbstractNativeRawDecoder {
+
+  static {
+    ErasureCodeNative.checkNativeCodeLoaded();
+  }
+
+  public NativeRSRawDecoder(ErasureCoderOptions coderOptions) {
+    super(coderOptions);
+    initImpl(coderOptions.getNumDataUnits(), coderOptions.getNumParityUnits());
+  }
+
+  @Override
+  protected void performDecodeImpl(ByteBuffer[] inputs, int[] inputOffsets,
+                                   int dataLen, int[] erased,
+                                   ByteBuffer[] outputs, int[] outputOffsets) {
+    decodeImpl(inputs, inputOffsets, dataLen, erased, outputs, outputOffsets);
+  }
+
+  @Override
+  public void release() {
+    destroyImpl();
+  }
+
+  private native void initImpl(int numDataUnits, int numParityUnits);
+
+  private native void decodeImpl(
+          ByteBuffer[] inputs, int[] inputOffsets, int dataLen, int[] erased,
+          ByteBuffer[] outputs, int[] outputOffsets);
+
+  private native void destroyImpl();
+
+}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawEncoder.java
new file mode 100644
index 0000000..297ed33
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawEncoder.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+
+import java.nio.ByteBuffer;
+
+/**
+ * A Reed-Solomon raw encoder using Intel ISA-L library.
+ */
+@InterfaceAudience.Private
+public class NativeRSRawEncoder extends AbstractNativeRawEncoder {
+
+  static {
+    ErasureCodeNative.checkNativeCodeLoaded();
+  }
+
+  public NativeRSRawEncoder(ErasureCoderOptions coderOptions) {
+    super(coderOptions);
+    initImpl(coderOptions.getNumDataUnits(), coderOptions.getNumParityUnits());
+  }
+
+  @Override
+  protected void performEncodeImpl(
+          ByteBuffer[] inputs, int[] inputOffsets, int dataLen,
+          ByteBuffer[] outputs, int[] outputOffsets) {
+    encodeImpl(inputs, inputOffsets, dataLen, outputs, outputOffsets);
+  }
+
+  @Override
+  public void release() {
+    destroyImpl();
+  }
+
+  private native void initImpl(int numDataUnits, int numParityUnits);
+
+  private native void encodeImpl(ByteBuffer[] inputs, int[] inputOffsets,
+                                        int dataLen, ByteBuffer[] outputs,
+                                        int[] outputOffsets);
+
+  private native void destroyImpl();
+}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawErasureCoderFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawErasureCoderFactory.java
new file mode 100644
index 0000000..38997dd
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawErasureCoderFactory.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+
+/**
+ * A raw coder factory for raw Reed-Solomon coder in native using Intel ISA-L.
+ */
+
+@InterfaceAudience.Private
+public class NativeRSRawErasureCoderFactory implements RawErasureCoderFactory {
+
+  @Override
+  public RawErasureEncoder createEncoder(ErasureCoderOptions coderOptions) {
+    return new NativeRSRawEncoder(coderOptions);
+  }
+
+  @Override
+  public RawErasureDecoder createDecoder(ErasureCoderOptions coderOptions) {
+    return new NativeRSRawDecoder(coderOptions);
+  }
+}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderFactory.java
index 6d94f00..05f228b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderFactory.java
@@ -30,14 +30,14 @@
 
   /**
    * Create raw erasure encoder.
-   * @param conf the configuration used to create the encoder
+   * @param coderOptions the options used to create the encoder
    * @return raw erasure encoder
    */
   RawErasureEncoder createEncoder(ErasureCoderOptions coderOptions);
 
   /**
    * Create raw erasure decoder.
-   * @param conf the configuration used to create the encoder
+   * @param coderOptions the options used to create the encoder
    * @return raw erasure decoder
    */
   RawErasureDecoder createDecoder(ErasureCoderOptions coderOptions);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java
index 96a6408..fdb47be 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java
@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder.util;
 
-import org.apache.hadoop.classification.InterfaceAudience;
-
 import java.nio.ByteBuffer;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * Implementation of Galois field arithmetic with 2^p elements. The input must
  * be unsigned integers. It's ported from HDFS-RAID, slightly adapted.
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_common.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_common.c
index 8126e9a..17e05db 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_common.c
+++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_common.c
@@ -34,20 +34,34 @@
 
 void setCoder(JNIEnv* env, jobject thiz, IsalCoder* pCoder) {
   jclass clazz = (*env)->GetObjectClass(env, thiz);
-  jfieldID __coderState = (*env)->GetFieldID(env, clazz, "__native_coder", "J");
-  (*env)->SetLongField(env, thiz, __coderState, (jlong) pCoder);
+  jfieldID fid = (*env)->GetFieldID(env, clazz, "nativeCoder", "J");
+  if (fid == NULL) {
+    THROW(env, "java/lang/UnsatisfiedLinkError",
+          "Field nativeCoder not found");
+    return; // fid is invalid; an exception is pending, do not use it
+  }
+  (*env)->SetLongField(env, thiz, fid, (jlong) pCoder);
 }
 
 IsalCoder* getCoder(JNIEnv* env, jobject thiz) {
   jclass clazz = (*env)->GetObjectClass(env, thiz);
 
-  jfieldID __verbose = (*env)->GetFieldID(env, clazz, "__native_verbose", "J");
-  int verbose = (int)(*env)->GetIntField(env, thiz, __verbose);
+  jmethodID mid = (*env)->GetMethodID(env, clazz, "allowVerboseDump", "()Z");
+  if (mid == NULL) {
+    THROW(env, "java/lang/UnsatisfiedLinkError",
+          "Method allowVerboseDump not found");
+    return NULL; // cannot call the method; exception pending
+  }
+  jboolean verbose = (*env)->CallBooleanMethod(env, thiz, mid);
 
-  jfieldID __coderState = (*env)->GetFieldID(env, clazz, "__native_coder", "J");
-  IsalCoder* pCoder = (IsalCoder*)(*env)->GetLongField(env,
-                                                       thiz, __coderState);
-  pCoder->verbose = verbose;
+  jfieldID fid = (*env)->GetFieldID(env, clazz, "nativeCoder", "J");
+  if (fid == NULL) {
+    THROW(env, "java/lang/UnsatisfiedLinkError",
+          "Field nativeCoder not found");
+    return NULL; // field lookup failed; exception pending
+  }
+  IsalCoder* pCoder = (IsalCoder*)(*env)->GetLongField(env, thiz, fid);
+  pCoder->verbose = (verbose == JNI_TRUE) ? 1 : 0;
 
   return pCoder;
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/org_apache_hadoop_io_erasurecode_ErasureCodeNative.h b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/org_apache_hadoop_io_erasurecode_ErasureCodeNative.h
deleted file mode 100644
index d8ff3a0..0000000
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/org_apache_hadoop_io_erasurecode_ErasureCodeNative.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class org_apache_hadoop_io_erasurecode_ErasureCodeNative */
-
-#ifndef _Included_org_apache_hadoop_io_erasurecode_ErasureCodeNative
-#define _Included_org_apache_hadoop_io_erasurecode_ErasureCodeNative
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     org_apache_hadoop_io_erasurecode_ErasureCodeNative
- * Method:    loadLibrary
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_ErasureCodeNative_loadLibrary
-  (JNIEnv *, jclass);
-
-/*
- * Class:     org_apache_hadoop_io_erasurecode_ErasureCodeNative
- * Method:    getLibraryName
- * Signature: ()Ljava/lang/String;
- */
-JNIEXPORT jstring JNICALL Java_org_apache_hadoop_io_erasurecode_ErasureCodeNative_getLibraryName
-  (JNIEnv *, jclass);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder.h b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder.h
deleted file mode 100644
index 40da4e1..0000000
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder */
-
-#ifndef _Included_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder
-#define _Included_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder
- * Method:    initImpl
- * Signature: (II[I)V
- */
-JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder_initImpl
-  (JNIEnv *, jobject, jint, jint);
-
-/*
- * Class:     org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder
- * Method:    decodeImpl
- * Signature: ([Ljava/nio/ByteBuffer;[II[I[Ljava/nio/ByteBuffer;[I)V
- */
-JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder_decodeImpl
-  (JNIEnv *, jobject, jobjectArray, jintArray, jint, jintArray, jobjectArray, jintArray);
-
-/*
- * Class:     org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder
- * Method:    destroyImpl
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder_destroyImpl
-  (JNIEnv *, jobject);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder.h b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder.h
deleted file mode 100644
index db094cf..0000000
--- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder */
-
-#ifndef _Included_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder
-#define _Included_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder
- * Method:    initImpl
- * Signature: (II[I)V
- */
-JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder_initImpl
-  (JNIEnv *, jobject, jint, jint);
-
-/*
- * Class:     org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder
- * Method:    encodeImpl
- * Signature: ([Ljava/nio/ByteBuffer;[II[Ljava/nio/ByteBuffer;[I)V
- */
-JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder_encodeImpl
-  (JNIEnv *, jobject, jobjectArray, jintArray, jint, jobjectArray, jintArray);
-
-/*
- * Class:     org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder
- * Method:    destroyImpl
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder_destroyImpl
-  (JNIEnv *, jobject);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java
new file mode 100644
index 0000000..edbb9df
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Test native raw Reed-Solomon encoding and decoding.
+ */
+public class TestNativeRSRawCoder extends TestRSRawCoderBase {
+
+  @Before
+  public void setup() {
+    Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
+    this.encoderClass = NativeRSRawEncoder.class;
+    this.decoderClass = NativeRSRawDecoder.class;
+    setAllowDump(true);
+  }
+
+  @Test
+  public void testCoding_6x3_erasing_all_d() {
+    prepare(null, 6, 3, new int[]{0, 1, 2}, new int[0], true);
+    testCodingDoMixAndTwice();
+  }
+
+  @Test
+  public void testCoding_6x3_erasing_d0_d2() {
+    prepare(null, 6, 3, new int[] {0, 2}, new int[]{});
+    testCodingDoMixAndTwice();
+  }
+
+  @Test
+  public void testCoding_6x3_erasing_d0() {
+    prepare(null, 6, 3, new int[]{0}, new int[0]);
+    testCodingDoMixAndTwice();
+  }
+
+  @Test
+  public void testCoding_6x3_erasing_d2() {
+    prepare(null, 6, 3, new int[]{2}, new int[]{});
+    testCodingDoMixAndTwice();
+  }
+
+  @Test
+  public void testCoding_6x3_erasing_d0_p0() {
+    prepare(null, 6, 3, new int[]{0}, new int[]{0});
+    testCodingDoMixAndTwice();
+  }
+
+  @Test
+  public void testCoding_6x3_erasing_all_p() {
+    prepare(null, 6, 3, new int[0], new int[]{0, 1, 2});
+    testCodingDoMixAndTwice();
+  }
+
+  @Test
+  public void testCoding_6x3_erasing_p0() {
+    prepare(null, 6, 3, new int[0], new int[]{0});
+    testCodingDoMixAndTwice();
+  }
+
+  @Test
+  public void testCoding_6x3_erasing_p2() {
+    prepare(null, 6, 3, new int[0], new int[]{2});
+    testCodingDoMixAndTwice();
+  }
+
+  @Test
+  public void testCoding_6x3_erasure_p0_p2() {
+    prepare(null, 6, 3, new int[0], new int[]{0, 2});
+    testCodingDoMixAndTwice();
+  }
+
+  @Test
+  public void testCoding_6x3_erasing_d0_p0_p1() {
+    prepare(null, 6, 3, new int[]{0}, new int[]{0, 1});
+    testCodingDoMixAndTwice();
+  }
+
+  @Test
+  public void testCoding_6x3_erasing_d0_d2_p2() {
+    prepare(null, 6, 3, new int[]{0, 2}, new int[]{2});
+    testCodingDoMixAndTwice();
+  }
+
+  @Test
+  public void testCodingNegative_6x3_erasing_d2_d4() {
+    prepare(null, 6, 3, new int[]{2, 4}, new int[0]);
+    testCodingDoMixAndTwice();
+  }
+
+  @Test
+  public void testCodingNegative_6x3_erasing_too_many() {
+    prepare(null, 6, 3, new int[]{2, 4}, new int[]{0, 1});
+    testCodingWithErasingTooMany();
+  }
+
+  @Test
+  public void testCoding_10x4_erasing_d0_p0() {
+    prepare(null, 10, 4, new int[] {0}, new int[] {0});
+    testCodingDoMixAndTwice();
+  }
+}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java
new file mode 100644
index 0000000..9ce041c
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
+import org.junit.Assume;
+import org.junit.Before;
+
+/**
+ * Test interoperability between the Java Reed-Solomon encoder and the native decoder.
+ */
+public class TestRSRawCoderInteroperable1 extends TestRSRawCoderBase {
+
+  @Before
+  public void setup() {
+    Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
+
+    this.encoderClass = RSRawEncoder.class;
+    this.decoderClass = NativeRSRawDecoder.class;
+    setAllowDump(true);
+  }
+
+}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java
new file mode 100644
index 0000000..15531f3
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.rawcoder;
+
+import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
+import org.junit.Assume;
+import org.junit.Before;
+
+/**
+ * Test interoperability between the native Reed-Solomon encoder and the Java decoder.
+ */
+public class TestRSRawCoderInteroperable2 extends TestRSRawCoderBase {
+
+  @Before
+  public void setup() {
+    Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
+
+    this.encoderClass = NativeRSRawEncoder.class;
+    this.decoderClass = RSRawDecoder.class;
+    setAllowDump(true);
+  }
+
+}