MRUNIT-192 - MRUNIT-180 requires values be comparable (Bertrand via Brock)
diff --git a/pom.xml b/pom.xml
index 7935c41..7acf10d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -165,6 +165,7 @@
                 <exclude>BUILD.txt</exclude>
                 <exclude>CHANGES.txt</exclude>
                 <exclude>BIN-NOTICE.txt</exclude>
+                <exclude>src/test/resources/log4j.properties</exclude>
                 <exclude>.idea/</exclude>
                 <exclude>.git/</exclude>
                 <exclude>.gitignore</exclude>
diff --git a/src/main/java/org/apache/hadoop/mrunit/MapDriver.java b/src/main/java/org/apache/hadoop/mrunit/MapDriver.java
index 7526fbd..2c2c88e 100644
--- a/src/main/java/org/apache/hadoop/mrunit/MapDriver.java
+++ b/src/main/java/org/apache/hadoop/mrunit/MapDriver.java
@@ -68,7 +68,7 @@
 
   /**
    * Sets the counters object to use for this test.
-   * 
+   *
    * @param ctrs
    *          The counters object to use.
    */
@@ -82,10 +82,10 @@
     setCounters(ctrs);
     return this;
   }
-  
+
   /**
    * Set the Mapper instance to use with this test driver
-   * 
+   *
    * @param m
    *          the Mapper instance to use
    */
@@ -109,7 +109,7 @@
   /**
    * Configure {@link Mapper} to output with a real {@link OutputFormat}. Set
    * {@link InputFormat} to read output back in for use with run* methods
-   * 
+   *
    * @param outputFormatClass
    * @param inputFormatClass
    * @return this for fluent style
@@ -156,7 +156,7 @@
   /**
    * Returns a new MapDriver without having to specify the generic types on the
    * right hand side of the object create statement.
-   * 
+   *
    * @return new MapDriver
    */
   public static <K1, V1, K2, V2> MapDriver<K1, V1, K2, V2> newMapDriver() {
@@ -166,7 +166,7 @@
   /**
    * Returns a new MapDriver without having to specify the generic types on the
    * right hand side of the object create statement.
-   * 
+   *
    * @param mapper
    * @return new MapDriver
    */
diff --git a/src/main/java/org/apache/hadoop/mrunit/MapDriverBase.java b/src/main/java/org/apache/hadoop/mrunit/MapDriverBase.java
index 8e30d61..2691648 100644
--- a/src/main/java/org/apache/hadoop/mrunit/MapDriverBase.java
+++ b/src/main/java/org/apache/hadoop/mrunit/MapDriverBase.java
@@ -53,7 +53,7 @@
 
   /**
    * Sets the input key to send to the mapper
-   * 
+   *
    * @param key
    * @deprecated MRUNIT-64. Moved to list implementation to support multiple
    *             input (k, v)*. Replaced by {@link #setInput},
@@ -70,7 +70,7 @@
 
   /**
    * Sets the input value to send to the mapper
-   * 
+   *
    * @param val
    * @deprecated MRUNIT-64. Moved to list implementation to support multiple
    *             input (k, v)*. Replaced by {@link #setInput},
@@ -87,7 +87,7 @@
 
   /**
    * Sets the input to send to the mapper
-   * 
+   *
    */
   public void setInput(final K1 key, final V1 val) {
   	setInput(new Pair<K1, V1>(key, val));
@@ -95,7 +95,7 @@
 
   /**
    * Sets the input to send to the mapper
-   * 
+   *
    * @param inputRecord
    *          a (key, val) pair
    */
@@ -110,7 +110,7 @@
 
   /**
    * Adds an input to send to the mapper
-   * 
+   *
    * @param key
    * @param val
    */
@@ -120,7 +120,7 @@
 
   /**
    * Adds an input to send to the mapper
-   * 
+   *
    * @param input
    *          a (K, V) pair
    */
@@ -130,7 +130,7 @@
 
   /**
    * Adds list of inputs to send to the mapper
-   * 
+   *
    * @param inputs
    *          list of (K, V) pairs
    */
@@ -150,7 +150,7 @@
   /**
    * Expects an input of the form "key \t val" Forces the Mapper input types to
    * Text.
-   * 
+   *
    * @param input
    *          A string of the form "key \t val".
    * @deprecated No replacement due to lack of type safety and incompatibility
@@ -171,7 +171,7 @@
 
   /**
    * Identical to setInputKey() but with fluent programming style
-   * 
+   *
    * @return this
    * @deprecated MRUNIT-64. Moved to list implementation to support multiple
    *             input (k, v)*. Replaced by {@link #withInput} and
@@ -185,7 +185,7 @@
 
   /**
    * Identical to setInputValue() but with fluent programming style
-   * 
+   *
    * @param val
    * @return this
    * @deprecated MRUNIT-64. Moved to list implementation to support multiple
@@ -211,7 +211,7 @@
 
   /**
    * Identical to setInput() but returns self for fluent programming style
-   * 
+   *
    * @param inputRecord
    * @return this
    */
@@ -222,7 +222,7 @@
 
   /**
    * Identical to setInputFromString, but with a fluent programming style
-   * 
+   *
    * @param input
    *          A string of the form "key \t val". Trims any whitespace.
    * @return this
@@ -237,7 +237,7 @@
 
   /**
    * Identical to addAll() but returns self for fluent programming style
-   * 
+   *
    * @param inputRecords
    * @return this
    */
diff --git a/src/main/java/org/apache/hadoop/mrunit/MapReduceDriver.java b/src/main/java/org/apache/hadoop/mrunit/MapReduceDriver.java
index fb56010..e9f76c5 100644
--- a/src/main/java/org/apache/hadoop/mrunit/MapReduceDriver.java
+++ b/src/main/java/org/apache/hadoop/mrunit/MapReduceDriver.java
@@ -42,7 +42,7 @@
  * Reducer (without checking them), and will check the Reducer's outputs against
  * the expected results. This is designed to handle the (k, v)* -> (k, v)* case
  * from the Mapper/Reducer pair, representing a single unit test.
- * 
+ *
  * If a combiner is specified, then it will be run exactly once after the Mapper
  * and before the Reducer.
  */
@@ -86,7 +86,7 @@
 
   /**
    * Sets the counters object to use for this test.
-   * 
+   *
    * @param ctrs
    *          The counters object to use.
    */
@@ -104,7 +104,7 @@
 
   /**
    * Set the Mapper instance to use with this test driver
-   * 
+   *
    * @param m
    *          the Mapper instance to use
    */
@@ -128,7 +128,7 @@
 
   /**
    * Sets the reducer object to use for this test
-   * 
+   *
    * @param r
    *          The reducer object to use
    */
@@ -138,7 +138,7 @@
 
   /**
    * Identical to setReducer(), but with fluent programming style
-   * 
+   *
    * @param r
    *          The Reducer to use
    * @return this
@@ -158,7 +158,7 @@
 
   /**
    * Sets the reducer object to use as a combiner for this test
-   * 
+   *
    * @param c
    *          The combiner object to use
    */
@@ -168,7 +168,7 @@
 
   /**
    * Identical to setCombiner(), but with fluent programming style
-   * 
+   *
    * @param c
    *          The Combiner to use
    * @return this
@@ -189,7 +189,7 @@
   /**
    * Configure {@link Reducer} to output with a real {@link OutputFormat}. Set
    * {@link InputFormat} to read output back in for use with run* methods
-   * 
+   *
    * @param outputFormatClass
    * @param inputFormatClass
    * @return this for fluent style
@@ -267,7 +267,7 @@
 
       // Run the reduce phase.
       LOG.debug("Starting reduce phase with reducer: " + myReducer);
-      
+
       return new ReducePhaseRunner<K3, V3>()
           .runReduce(shuffle(mapOutputs),myReducer);
     } finally {
@@ -283,7 +283,7 @@
   /**
    * Returns a new MapReduceDriver without having to specify the generic types
    * on the right hand side of the object create statement.
-   * 
+   *
    * @return new MapReduceDriver
    */
   public static <K1, V1, K2, V2, K3, V3> MapReduceDriver<K1, V1, K2, V2, K3, V3> newMapReduceDriver() {
@@ -293,7 +293,7 @@
   /**
    * Returns a new MapReduceDriver without having to specify the generic types
    * on the right hand side of the object create statement.
-   * 
+   *
    * @param mapper
    *          passed to MapReduceDriver constructor
    * @param reducer
@@ -308,7 +308,7 @@
   /**
    * Returns a new MapReduceDriver without having to specify the generic types
    * on the right hand side of the object create statement.
-   * 
+   *
    * @param mapper
    *          passed to MapReduceDriver constructor
    * @param reducer
diff --git a/src/main/java/org/apache/hadoop/mrunit/MapReduceDriverBase.java b/src/main/java/org/apache/hadoop/mrunit/MapReduceDriverBase.java
index c09abc8..25cc274 100644
--- a/src/main/java/org/apache/hadoop/mrunit/MapReduceDriverBase.java
+++ b/src/main/java/org/apache/hadoop/mrunit/MapReduceDriverBase.java
@@ -62,7 +62,7 @@
 
   /**
    * Adds an input to send to the mapper
-   * 
+   *
    * @param key
    * @param val
    */
@@ -72,7 +72,7 @@
 
   /**
    * Adds an input to send to the Mapper
-   * 
+   *
    * @param input
    *          The (k, v) pair to add to the input list.
    */
@@ -82,7 +82,7 @@
 
   /**
    * Adds input to send to the mapper
-   * 
+   *
    * @param inputs
    *          List of (k, v) pairs to add to the input list
    */
@@ -95,7 +95,7 @@
   /**
    * Expects an input of the form "key \t val" Forces the Mapper input types to
    * Text.
-   * 
+   *
    * @param input
    *          A string of the form "key \t val". Trims any whitespace.
    * @deprecated No replacement due to lack of type safety and incompatibility
@@ -114,7 +114,7 @@
 
   /**
    * Identical to addInput() but returns self for fluent programming style
-   * 
+   *
    * @param key
    * @param val
    * @return this
@@ -127,7 +127,7 @@
 
   /**
    * Identical to addInput() but returns self for fluent programming style
-   * 
+   *
    * @param input
    *          The (k, v) pair to add
    * @return this
@@ -140,7 +140,7 @@
 
   /**
    * Identical to addInputFromString, but with a fluent programming style
-   * 
+   *
    * @param input
    *          A string of the form "key \t val". Trims any whitespace.
    * @return this
@@ -156,7 +156,7 @@
 
   /**
    * Identical to addAll() but returns self for fluent programming style
-   * 
+   *
    * @param inputs
    *          List of (k, v) pairs to add
    * @return this
@@ -215,16 +215,16 @@
   /**
    * Take the outputs from the Mapper, combine all values for the same key, and
    * sort them by key.
-   * 
+   *
    * @param mapOutputs
    *          An unordered list of (key, val) pairs from the mapper
    * @return the sorted list of (key, list(val))'s to present to the reducer
    */
   public List<Pair<K2, List<V2>>> shuffle(final List<Pair<K2, V2>> mapOutputs) {
-    
+
     final Comparator<K2> keyOrderComparator;
     final Comparator<K2> keyGroupComparator;
-    
+
     if (mapOutputs.isEmpty()) {
       return Collections.emptyList();
     }
@@ -241,7 +241,7 @@
     } else {
       keyOrderComparator = this.keyValueOrderComparator;
     }
-    
+
     // get the grouping comparator or work out from conf
     if (this.keyGroupComparator == null) {
       keyGroupComparator = conf.getOutputValueGroupingComparator();
@@ -257,12 +257,12 @@
     });
 
     // apply grouping comparator to create groups
-    final Map<K2, List<Pair<K2, V2>>> groupedByKey = 
+    final Map<K2, List<Pair<K2, V2>>> groupedByKey =
         new LinkedHashMap<K2, List<Pair<K2, V2>>>();
-    
+
     List<Pair<K2, V2>> groupedKeyList = null;
     Pair<K2,V2> previous = null;
-    
+
     for (final Pair<K2, V2> mapOutput : mapOutputs) {
       if (previous == null || keyGroupComparator
           .compare(previous.getFirst(), mapOutput.getFirst()) != 0) {
@@ -275,7 +275,7 @@
 
     // populate output list
     final List<Pair<K2, List<V2>>> outputKeyValuesList = new ArrayList<Pair<K2, List<V2>>>();
-    for (final Entry<K2, List<Pair<K2, V2>>> groupedByKeyEntry : 
+    for (final Entry<K2, List<Pair<K2, V2>>> groupedByKeyEntry :
             groupedByKey.entrySet()) {
 
       // create list to hold values for the grouped key
@@ -299,7 +299,7 @@
    * <LI>pre 0.20.1 API: {@link JobConf#setOutputValueGroupingComparator(Class)}
    * <LI>0.20.1+ API: {@link Job#setGroupingComparatorClass(Class)}
    * </UL>
-   * 
+   *
    * @param groupingComparator
    */
   public void setKeyGroupingComparator(
@@ -315,7 +315,7 @@
    * <LI>pre 0.20.1 API: {@link JobConf#setOutputKeyComparatorClass(Class)}
    * <LI>0.20.1+ API: {@link Job#setSortComparatorClass(Class)}
    * </UL>
-   * 
+   *
    * @param orderComparator
    */
   public void setKeyOrderComparator(final RawComparator<K2> orderComparator) {
@@ -326,7 +326,7 @@
   /**
    * Identical to {@link #setKeyGroupingComparator(RawComparator)}, but with a
    * fluent programming style
-   * 
+   *
    * @param groupingComparator
    *          Comparator to use in the shuffle stage for key grouping
    * @return this
@@ -339,7 +339,7 @@
   /**
    * Identical to {@link #setKeyOrderComparator(RawComparator)}, but with a
    * fluent programming style
-   * 
+   *
    * @param orderComparator
    *          Comparator to use in the shuffle stage for key value ordering
    * @return this
diff --git a/src/main/java/org/apache/hadoop/mrunit/PipelineMapReduceDriver.java b/src/main/java/org/apache/hadoop/mrunit/PipelineMapReduceDriver.java
index cc31ff0..9cf4e42 100644
--- a/src/main/java/org/apache/hadoop/mrunit/PipelineMapReduceDriver.java
+++ b/src/main/java/org/apache/hadoop/mrunit/PipelineMapReduceDriver.java
@@ -38,16 +38,16 @@
  * workflow, as well as a set of (key, value) pairs to pass in to the first
  * Mapper. You can also specify the outputs you expect to be sent to the final
  * Reducer in the pipeline.
- * 
+ *
  * By calling runTest(), the harness will deliver the input to the first Mapper,
  * feed the intermediate results to the first Reducer (without checking them),
  * and proceed to forward this data along to subsequent Mapper/Reducer jobs in
  * the pipeline until the final Reducer. The last Reducer's outputs are checked
  * against the expected results.
- * 
+ *
  * This is designed for slightly more complicated integration tests than the
  * MapReduceDriver, which is for smaller unit tests.
- * 
+ *
  * (K1, V1) in the type signature refer to the types associated with the inputs
  * to the first Mapper. (K2, V2) refer to the types associated with the final
  * Reducer's output. No intermediate types are specified.
@@ -83,7 +83,7 @@
 
   /**
    * Sets the counters object to use for this test.
-   * 
+   *
    * @param ctrs
    *          The counters object to use.
    */
@@ -102,7 +102,7 @@
   /**
    * Add a Mapper and Reducer instance to the pipeline to use with this test
    * driver
-   * 
+   *
    * @param m
    *          The Mapper instance to add to the pipeline
    * @param r
@@ -115,7 +115,7 @@
   /**
    * Add a Mapper and Reducer instance to the pipeline to use with this test
    * driver
-   * 
+   *
    * @param p
    *          The Mapper and Reducer instances to add to the pipeline
    */
@@ -126,7 +126,7 @@
   /**
    * Add a Mapper and Reducer instance to the pipeline to use with this test
    * driver using fluent style
-   * 
+   *
    * @param m
    *          The Mapper instance to use
    * @param r
@@ -141,7 +141,7 @@
   /**
    * Add a Mapper and Reducer instance to the pipeline to use with this test
    * driver using fluent style
-   * 
+   *
    * @param p
    *          The Mapper and Reducer instances to add to the pipeline
    */
@@ -160,7 +160,7 @@
 
   /**
    * Adds an input to send to the mapper
-   * 
+   *
    * @param key
    * @param val
    */
@@ -170,7 +170,7 @@
 
   /**
    * Adds list of inputs to send to the mapper
-   * 
+   *
    * @param inputs
    *          list of (K, V) pairs
    */
@@ -182,7 +182,7 @@
 
   /**
    * Identical to addInput() but returns self for fluent programming style
-   * 
+   *
    * @param key
    * @param val
    * @return this
@@ -195,7 +195,7 @@
 
   /**
    * Adds an input to send to the Mapper
-   * 
+   *
    * @param input
    *          The (k, v) pair to add to the input list.
    */
@@ -205,7 +205,7 @@
 
   /**
    * Identical to addInput() but returns self for fluent programming style
-   * 
+   *
    * @param input
    *          The (k, v) pair to add
    * @return this
@@ -219,7 +219,7 @@
   /**
    * Expects an input of the form "key \t val" Forces the Mapper input types to
    * Text.
-   * 
+   *
    * @param input
    *          A string of the form "key \t val". Trims any whitespace.
    * @deprecated No replacement due to lack of type safety and incompatibility
@@ -233,7 +233,7 @@
 
   /**
    * Identical to addInputFromString, but with a fluent programming style
-   * 
+   *
    * @param input
    *          A string of the form "key \t val". Trims any whitespace.
    * @return this
@@ -249,7 +249,7 @@
 
   /**
    * Identical to addAll() but returns self for fluent programming style
-   * 
+   *
    * @param inputRecords input key/value pairs
    * @return this
    */
@@ -331,7 +331,7 @@
   /**
    * Returns a new PipelineMapReduceDriver without having to specify the generic
    * types on the right hand side of the object create statement.
-   * 
+   *
    * @return new PipelineMapReduceDriver
    */
   public static <K1, V1, K2, V2> PipelineMapReduceDriver<K1, V1, K2, V2> newPipelineMapReduceDriver() {
@@ -341,7 +341,7 @@
   /**
    * Returns a new PipelineMapReduceDriver without having to specify the generic
    * types on the right hand side of the object create statement.
-   * 
+   *
    * @param pipeline
    *          passed to PipelineMapReduceDriver constructor
    * @return new PipelineMapReduceDriver
diff --git a/src/main/java/org/apache/hadoop/mrunit/ReduceDriver.java b/src/main/java/org/apache/hadoop/mrunit/ReduceDriver.java
index 8ef1a3b..fa9b706 100644
--- a/src/main/java/org/apache/hadoop/mrunit/ReduceDriver.java
+++ b/src/main/java/org/apache/hadoop/mrunit/ReduceDriver.java
@@ -67,7 +67,7 @@
 
   /**
    * Sets the counters object to use for this test.
-   * 
+   *
    * @param ctrs
    *          The counters object to use.
    */
@@ -84,7 +84,7 @@
 
   /**
    * Sets the reducer object to use for this test
-   * 
+   *
    * @param r
    *          The reducer object to use
    */
@@ -94,7 +94,7 @@
 
   /**
    * Identical to setReducer(), but with fluent programming style
-   * 
+   *
    * @param r
    *          The Reducer to use
    * @return this
@@ -112,7 +112,7 @@
   /**
    * Configure {@link Reducer} to output with a real {@link OutputFormat}. Set
    * {@link InputFormat} to read output back in for use with run* methods
-   * 
+   *
    * @param outputFormatClass
    * @param inputFormatClass
    * @return this for fluent style
@@ -145,7 +145,7 @@
       myReducer.close();
       return outputCollectable.getOutputs();
     } finally {
-      cleanupDistributedCache();  
+      cleanupDistributedCache();
     }
   }
 
@@ -157,7 +157,7 @@
   /**
    * Returns a new ReduceDriver without having to specify the generic types on
    * the right hand side of the object create statement.
-   * 
+   *
    * @return new ReduceDriver
    */
   public static <K1, V1, K2, V2> ReduceDriver<K1, V1, K2, V2> newReduceDriver() {
@@ -167,8 +167,8 @@
   /**
    * Returns a new ReduceDriver without having to specify the generic types on
    * the right hand side of the object create statement.
-   * 
-   * 
+   *
+   *
    * @param reducer
    *          passed to ReduceDriver constructor
    * @return new ReduceDriver
diff --git a/src/main/java/org/apache/hadoop/mrunit/ReduceDriverBase.java b/src/main/java/org/apache/hadoop/mrunit/ReduceDriverBase.java
index 86c46eb..c66087f 100644
--- a/src/main/java/org/apache/hadoop/mrunit/ReduceDriverBase.java
+++ b/src/main/java/org/apache/hadoop/mrunit/ReduceDriverBase.java
@@ -34,7 +34,7 @@
  * sent to the Reducer (as if they came from a Mapper), and outputs you expect
  * to be sent by the Reducer to the collector. By calling runTest(), the harness
  * will deliver the input to the Reducer and will check its outputs against the
- * expected results. 
+ * expected results.
  */
 public abstract class ReduceDriverBase<K1, V1, K2, V2, T extends ReduceDriverBase<K1, V1, K2, V2, T>>
     extends TestDriver<K1, V1, K2, V2, T> {
@@ -53,7 +53,7 @@
 
   /**
    * Returns a list of values.
-   * 
+   *
    * @return List of values
    * @deprecated MRUNIT-64. Moved to list implementation to support multiple
    *             input (k, v*)*. Replaced by {@link #getInputValues(Object)}
@@ -65,7 +65,7 @@
 
   /**
    * Returns a list of values for the given key
-   * 
+   *
    * @param key
    * @return List for the given key, or null if key does not exist
    */
@@ -80,7 +80,7 @@
 
   /**
    * Sets the input key to send to the Reducer
-   * 
+   *
    * @deprecated MRUNIT-64. Moved to list implementation to support multiple
    *             input (k, v*)*. Replaced by {@link #setInput},
    *             {@link #addInput}, and {@link #addAll}
@@ -92,7 +92,7 @@
 
   /**
    * adds an input value to send to the reducer
-   * 
+   *
    * @param val
    * @deprecated MRUNIT-64. Moved to list implementation to support multiple
    *             input (k, v*)*. Replaced by {@link #setInput},
@@ -105,7 +105,7 @@
 
   /**
    * Sets the input values to send to the reducer; overwrites existing ones
-   * 
+   *
    * @param values
    * @deprecated MRUNIT-64. Moved to list implementation to support multiple
    *             input (k, v*)*. Replaced by {@link #setInput},
@@ -119,7 +119,7 @@
 
   /**
    * Adds a set of input values to send to the reducer
-   * 
+   *
    * @param values
    * @deprecated MRUNIT-64. Moved to list implementation to support multiple
    *             input (k, v*)*. Replaced by {@link #setInput},
@@ -134,14 +134,14 @@
 
   /**
    * Sets the input to send to the reducer
-   * 
+   *
    * @param key
    * @param values
    */
   public void setInput(final K1 key, final List<V1> values) {
     setInputKey(key);
     setInputValues(values);
-    
+
     clearInput();
     addInput(key, values);
   }
@@ -155,7 +155,7 @@
 
   /**
    * Add input (K, V*) to send to the Reducer
-   * 
+   *
    * @param key
    *          The key too add
    * @param values
@@ -173,7 +173,7 @@
 
   /**
    * Add input (K, V*) to send to the Reducer
-   * 
+   *
    * @param input
    *          input pair
    */
@@ -183,7 +183,7 @@
 
   /**
    * Adds input to send to the Reducer
-   * 
+   *
    * @param inputs
    *          list of (K, V*) pairs
    */
@@ -196,7 +196,7 @@
   /**
    * Expects an input of the form "key \t val, val, val..." Forces the Reducer
    * input types to Text.
-   * 
+   *
    * @param input
    *          A string of the form "key \t val,val,val". Trims any whitespace.
    * @deprecated No replacement due to lack of type safety and incompatibility
@@ -218,7 +218,7 @@
 
   /**
    * Identical to setInputKey() but with fluent programming style
-   * 
+   *
    * @return this
    * @deprecated MRUNIT-64. Moved to list implementation to support multiple
    *             input (k, v*)*. Replaced by {@link #withInput(Object, List)},
@@ -232,7 +232,7 @@
 
   /**
    * Identical to addInputValue() but with fluent programming style
-   * 
+   *
    * @param val
    * @return this
    * @deprecated MRUNIT-64. Moved to list implementation to support multiple
@@ -247,7 +247,7 @@
 
   /**
    * Identical to addInputValues() but with fluent programming style
-   * 
+   *
    * @param values
    * @return this
    * @deprecated MRUNIT-64. Moved to list implementation to support multiple
@@ -262,7 +262,7 @@
 
   /**
    * Identical to setInput() but returns self for fluent programming style
-   * 
+   *
    * @return this
    */
   public T withInput(final K1 key,
@@ -273,7 +273,7 @@
 
   /**
    * Identical to setInput, but with a fluent programming style
-   * 
+   *
    * @param input
    *          A string of the form "key \t val". Trims any whitespace.
    * @return this
@@ -288,7 +288,7 @@
 
   /**
    * Identical to addInput() but returns self for fluent programming style
-   * 
+   *
    * @param input
    * @return this
    */
@@ -299,7 +299,7 @@
 
   /**
    * Identical to addAll() but returns self for fluent programming style
-   * 
+   *
    * @param inputs
    * @return this
    */
@@ -321,7 +321,7 @@
     if (inputs == null || inputs.isEmpty()) {
       throw new IllegalStateException("No input was provided");
     }
-    
+
     if (reducer == null) {
       throw new IllegalStateException("No Reducer class was provided");
     }
diff --git a/src/main/java/org/apache/hadoop/mrunit/TestDriver.java b/src/main/java/org/apache/hadoop/mrunit/TestDriver.java
index 5c43176..be4d67e 100644
--- a/src/main/java/org/apache/hadoop/mrunit/TestDriver.java
+++ b/src/main/java/org/apache/hadoop/mrunit/TestDriver.java
@@ -37,7 +37,7 @@
 import org.apache.hadoop.mrunit.internal.output.MockMultipleOutputs;
 import org.apache.hadoop.mrunit.internal.util.DistCacheUtils;
 import org.apache.hadoop.mrunit.internal.util.Errors;
-import org.apache.hadoop.mrunit.internal.util.PairComparator;
+import org.apache.hadoop.mrunit.internal.util.PairEquality;
 import org.apache.hadoop.mrunit.internal.util.StringUtils;
 import org.apache.hadoop.mrunit.types.Pair;
 
@@ -675,104 +675,169 @@
    */
   protected void validate(final List<Pair<K2, V2>> outputs,
       final boolean orderMatters) {
+    // expected nothing and got nothing, everything is fine
+    if (outputs.isEmpty() && expectedOutputs.isEmpty()) {
+        return;
+    }
 
     final Errors errors = new Errors(LOG);
-
-    if (!outputs.isEmpty()) {
-      // were we supposed to get output in the first place?
-      if (expectedOutputs.isEmpty()) {
-        errors.record("Expected no outputs; got %d outputs.", outputs.size());
-      }
-      // check that user's key and value writables implement equals, hashCode, toString
-      checkOverrides(outputs.get(0));
+    // expected nothing but got something
+    if (!outputs.isEmpty() && expectedOutputs.isEmpty()) {
+        errors.record("Expected no output; got %d output(s).", outputs.size());
+        errors.assertNone();
+    }
+    // expected something but got nothing
+    if (outputs.isEmpty() && !expectedOutputs.isEmpty()) {
+        errors.record("Expected %d output(s); got no output.", expectedOutputs.size());
+        errors.assertNone();
     }
 
-    final Comparator<Pair<K2, V2>> pairComparator = new PairComparator<K2, V2>(
-      keyComparator, valueComparator);
-    final Map<Pair<K2, V2>, List<Integer>> expectedPositions = buildPositionMap(
-      expectedOutputs, pairComparator);
-    final Map<Pair<K2, V2>, List<Integer>> actualPositions = buildPositionMap(
-      outputs, pairComparator);
+    // now, the smart test needs to be done
+    // check that user's key and value writables implement equals, hashCode, toString
+    checkOverrides(outputs, expectedOutputs);
 
-    for (final Pair<K2, V2> output : expectedPositions.keySet()) {
-      final List<Integer> expectedPositionList = expectedPositions.get(output);
-      final List<Integer> actualPositionList = actualPositions.get(output);
-      if (actualPositionList != null) {
-        // the expected value has been seen - check positions
-        final int expectedPositionsCount = expectedPositionList.size();
-        final int actualPositionsCount = actualPositionList.size();
-        if (orderMatters) {
-          // order is important, so the positions must match exactly
-          if (expectedPositionList.equals(actualPositionList)) {
-            LOG.debug(String.format("Matched expected output %s at "
-                + "positions %s", output, expectedPositionList.toString()));
-          } else {
-            int i = 0;
-            while (expectedPositionsCount > i || actualPositionsCount > i) {
-              if (expectedPositionsCount > i && actualPositionsCount > i) {
-                final int expectedPosition = expectedPositionList.get(i);
-                final int actualPosition = actualPositionList.get(i);
-                if (expectedPosition == actualPosition) {
-                  LOG.debug(String.format("Matched expected output %s at "
-                      + "position %d", output, expectedPosition));
-                } else {
-                  errors.record("Matched expected output %s but at "
-                      + "incorrect position %d (expected position %d)", output,
-                      actualPosition, expectedPosition);
-                }
-              } else if (expectedPositionsCount > i) {
-                // not ok, value wasn't seen enough times
-                errors.record("Missing expected output %s at position %d.",
-                    output, expectedPositionList.get(i));
-              } else {
-                // not ok, value seen too many times
-                errors.record("Received unexpected output %s at position %d.",
-                    output, actualPositionList.get(i));
-              }
-              i++;
-            }
-          }
-        } else {
-          // order is unimportant, just check that the count of times seen match
-          if (expectedPositionsCount == actualPositionsCount) {
-            // ok, counts match
-            LOG.debug(String.format("Matched expected output %s in "
-                + "%d positions", output, expectedPositionsCount));
-          } else if (expectedPositionsCount > actualPositionsCount) {
-            // not ok, value wasn't seen enough times
-            for (int i = 0; i < expectedPositionsCount - actualPositionsCount; i++) {
-              errors.record("Missing expected output %s", output);
-            }
-          } else {
-            // not ok, value seen too many times
-            for (int i = 0; i < actualPositionsCount - expectedPositionsCount; i++) {
-              errors.record("Received unexpected output %s", output);
-            }
-          }
+    final PairEquality<K2, V2> equality = new PairEquality<K2, V2>(
+            keyComparator, valueComparator);
+    if (orderMatters) {
+        validateWithOrder(outputs, errors, equality);
+    } else {
+        validateWithoutOrder(outputs, errors, equality);
+    }
+
+    // if there are errors, it might be due to types and not clear from the message
+    if (!errors.isEmpty()) {
+      Class<?> outputKeyClass = null;
+      Class<?> outputValueClass = null;
+      Class<?> expectedKeyClass = null;
+      Class<?> expectedValueClass = null;
+
+      for (Pair<K2, V2> output : outputs) {
+        if (output.getFirst() != null) {
+          outputKeyClass = output.getFirst().getClass();
         }
-        actualPositions.remove(output);
-      } else {
-        // the expected value was not found anywhere - output errors
-        checkTypesAndLogError(outputs, output, expectedPositionList,
-            orderMatters, errors, "Missing expected output");
+        if (output.getSecond() != null) {
+          outputValueClass = output.getSecond().getClass();
+        }
+        if (outputKeyClass != null && outputValueClass != null) {
+          break;
+        }
+      }
+
+      for (Pair<K2, V2> expected : expectedOutputs) {
+        if (expected.getFirst() != null) {
+          expectedKeyClass = expected.getFirst().getClass();
+        }
+        if (expected.getSecond() != null) {
+          expectedValueClass = expected.getSecond().getClass();
+        }
+        if (expectedKeyClass != null && expectedValueClass != null) {
+          break;
+        }
+      }
+
+      if (outputKeyClass != null && expectedKeyClass != null
+          && !outputKeyClass.equals(expectedKeyClass)) {
+        errors.record("Mismatch in key class: expected: %s actual: %s",
+            expectedKeyClass, outputKeyClass);
+      }
+
+      if (outputValueClass != null && expectedValueClass != null
+          && !outputValueClass.equals(expectedValueClass)) {
+        errors.record("Mismatch in value class: expected: %s actual: %s",
+            expectedValueClass, outputValueClass);
       }
     }
-
-    for (final Pair<K2, V2> output : actualPositions.keySet()) {
-      // anything left in actual set is unexpected
-      checkTypesAndLogError(outputs, output, actualPositions.get(output),
-          orderMatters, errors, "Received unexpected output");
-    }
-
     errors.assertNone();
   }
 
-  private void checkOverrides(final Pair<K2,V2> outputPair) {
-    checkOverride(outputPair.getFirst().getClass());
-    checkOverride(outputPair.getSecond().getClass());
+  private void validateWithoutOrder(final List<Pair<K2, V2>> outputs,
+      final Errors errors, final PairEquality<K2, V2> equality) {
+    Set<Integer> verifiedExpecteds = new HashSet<Integer>();
+    Set<Integer> unverifiedOutputs = new HashSet<Integer>();
+    for (int i = 0; i < outputs.size(); i++) {
+        Pair<K2, V2> output = outputs.get(i);
+        boolean found = false;
+        for (int j = 0; j < expectedOutputs.size(); j++) {
+            if (verifiedExpecteds.contains(j)) {
+                continue;
+            }
+            Pair<K2, V2> expected = expectedOutputs.get(j);
+            if (equality.isTrueFor(output, expected)) {
+                found = true;
+                verifiedExpecteds.add(j);
+                LOG.debug(String.format("Matched expected output %s no %d at "
+                        + "position %d", output, j, i));
+                break;
+            }
+        }
+        if (!found) {
+            unverifiedOutputs.add(i);
+        }
+    }
+    for (int j = 0; j < expectedOutputs.size(); j++) {
+        if (!verifiedExpecteds.contains(j)) {
+            errors.record("Missing expected output %s", expectedOutputs.get(j));
+        }
+    }
+    for (int i = 0; i < outputs.size(); i++) {
+        if (unverifiedOutputs.contains(i)) {
+            errors.record("Received unexpected output %s", outputs.get(i));
+        }
+    }
+  }
+
+  private void validateWithOrder(final List<Pair<K2, V2>> outputs,
+      final Errors errors, final PairEquality<K2, V2> equality) {
+    int i = 0;
+    for (i = 0; i < Math.min(outputs.size(), expectedOutputs.size()); i++) {
+        Pair<K2, V2> output = outputs.get(i);
+        Pair<K2, V2> expected = expectedOutputs.get(i);
+        if (equality.isTrueFor(output, expected)) {
+            LOG.debug(String.format("Matched expected output %s at "
+                    + "position %d", expected, i));
+        } else {
+            errors.record("Missing expected output %s at position %d, got %s.",
+                    expected, i, output);
+        }
+    }
+    for (int j = i; j < outputs.size(); j++) {
+        errors.record("Received unexpected output %s at position %d.",
+                outputs.get(j), j);
+    }
+    for (int j = i; j < expectedOutputs.size(); j++) {
+        errors.record("Missing expected output %s at position %d.",
+                expectedOutputs.get(j), j);
+    }
+  }
+
+  private void checkOverrides(final List<Pair<K2,V2>> outputPairs, final List<Pair<K2,V2>> expectedOutputPairs) {
+    Class<?> keyClass = null;
+    Class<?> valueClass = null;
+    // key or value could be null, try to find a class
+    for (Pair<K2,V2> pair : outputPairs) {
+        if (keyClass == null && pair.getFirst() != null) {
+            keyClass = pair.getFirst().getClass();
+        }
+        if (valueClass == null && pair.getSecond() != null) {
+            valueClass = pair.getSecond().getClass();
+        }
+    }
+    for (Pair<K2,V2> pair : expectedOutputPairs) {
+        if (keyClass == null && pair.getFirst() != null) {
+            keyClass = pair.getFirst().getClass();
+        }
+        if (valueClass == null && pair.getSecond() != null) {
+            valueClass = pair.getSecond().getClass();
+        }
+    }
+    checkOverride(keyClass);
+    checkOverride(valueClass);
   }
 
   private void checkOverride(final Class<?> clazz) {
+    if (clazz == null) {
+        return;
+    }
     try {
       if (clazz.getMethod("equals", Object.class).getDeclaringClass() != clazz) {
         LOG.warn(clazz.getCanonicalName() + ".equals(Object) " +
@@ -794,41 +859,6 @@
     }
   }
 
-  private void checkTypesAndLogError(final List<Pair<K2, V2>> outputs,
-      final Pair<K2, V2> output, final List<Integer> positions,
-      final boolean orderMatters, final Errors errors,
-      final String errorString) {
-    for (final int pos : positions) {
-      String msg = null;
-      if (expectedOutputs.size() > pos && outputs.size() > pos) {
-        final Pair<K2, V2> actual = outputs.get(pos);
-        final Pair<K2, V2> expected = expectedOutputs.get(pos);
-        final Class<?> actualKeyClass = actual.getFirst().getClass();
-        final Class<?> actualValueClass = actual.getSecond().getClass();
-        final Class<?> expectedKeyClass = expected.getFirst().getClass();
-        final Class<?> expectedValueClass = expected.getSecond().getClass();
-        if (actualKeyClass != expectedKeyClass) {
-          msg = String.format(
-              "%s %s: Mismatch in key class: expected: %s actual: %s",
-              errorString, output, expectedKeyClass, actualKeyClass);
-        } else if (actualValueClass != expectedValueClass) {
-          msg = String.format(
-              "%s %s: Mismatch in value class: expected: %s actual: %s",
-              errorString, output, expectedValueClass, actualValueClass);
-        }
-      }
-      if (msg == null) {
-        if (orderMatters) {
-          msg = String
-              .format("%s %s at position %d.", errorString, output, pos);
-        } else {
-          msg = String.format("%s %s", errorString, output);
-        }
-      }
-      errors.record(msg);
-    }
-  }
-
   private Map<Pair<K2, V2>, List<Integer>> buildPositionMap(
       final List<Pair<K2, V2>> values, Comparator<Pair<K2, V2>> comparator) {
     final Map<Pair<K2, V2>, List<Integer>> valuePositions =
diff --git a/src/main/java/org/apache/hadoop/mrunit/internal/counters/CounterWrapper.java b/src/main/java/org/apache/hadoop/mrunit/internal/counters/CounterWrapper.java
index b235060..f151f2a 100644
--- a/src/main/java/org/apache/hadoop/mrunit/internal/counters/CounterWrapper.java
+++ b/src/main/java/org/apache/hadoop/mrunit/internal/counters/CounterWrapper.java
@@ -46,7 +46,7 @@
 
   /**
    * Wrap old counter object
-   * 
+   *
    * @param counters
    */
   public CounterWrapper(final org.apache.hadoop.mapred.Counters counters) {
@@ -55,7 +55,7 @@
 
   /**
    * Wrap new counter object
-   * 
+   *
    * @param counters
    */
   public CounterWrapper(final org.apache.hadoop.mapreduce.Counters counters) {
@@ -64,7 +64,7 @@
 
   /**
    * Get counter value based on Enumeration
-   * 
+   *
    * @param e
    * @return
    */
@@ -78,7 +78,7 @@
 
   /**
    * Get counter value based on name
-   * 
+   *
    * @param group
    * @param name
    * @return
diff --git a/src/main/java/org/apache/hadoop/mrunit/internal/io/Serialization.java b/src/main/java/org/apache/hadoop/mrunit/internal/io/Serialization.java
index 8301d41..c3d2842 100644
--- a/src/main/java/org/apache/hadoop/mrunit/internal/io/Serialization.java
+++ b/src/main/java/org/apache/hadoop/mrunit/internal/io/Serialization.java
@@ -44,7 +44,7 @@
    * the serialization class may or may not copy the orig object into the copy
    * object based on the contract on
    * org.apache.hadoop.io.serializer.Deserializer.deserialize
-   * 
+   *
    * @param orig
    * @param copy
    *          if null always returns a new object, if not null may or may not
@@ -89,7 +89,7 @@
 
   /**
    * Creates a new copy of the orig object
-   * 
+   *
    * @param orig
    * @return a new copy of the orig object
    */
@@ -99,7 +99,7 @@
 
   /**
    * Creates a new copy of the orig object
-   * 
+   *
    * @param orig
    * @param conf
    *          new Configuration object to use
diff --git a/src/main/java/org/apache/hadoop/mrunit/internal/mapred/MockReporter.java b/src/main/java/org/apache/hadoop/mrunit/internal/mapred/MockReporter.java
index e28b48d..ebe7445 100644
--- a/src/main/java/org/apache/hadoop/mrunit/internal/mapred/MockReporter.java
+++ b/src/main/java/org/apache/hadoop/mrunit/internal/mapred/MockReporter.java
@@ -37,7 +37,7 @@
   public MockReporter(final ReporterType kind, final Counters ctrs) {
     this(kind, ctrs, null);
   }
-  
+
   public MockReporter(final ReporterType kind, final Counters ctrs,
       final Path mapInputPath) {
     typ = kind;
diff --git a/src/main/java/org/apache/hadoop/mrunit/internal/mapreduce/AbstractMockContextWrapper.java b/src/main/java/org/apache/hadoop/mrunit/internal/mapreduce/AbstractMockContextWrapper.java
index 521e732..882562c 100644
--- a/src/main/java/org/apache/hadoop/mrunit/internal/mapreduce/AbstractMockContextWrapper.java
+++ b/src/main/java/org/apache/hadoop/mrunit/internal/mapreduce/AbstractMockContextWrapper.java
@@ -36,7 +36,7 @@
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
-abstract class AbstractMockContextWrapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT, CONTEXT 
+abstract class AbstractMockContextWrapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT, CONTEXT
 extends TaskInputOutputContext<KEYIN, VALUEIN, KEYOUT, VALUEOUT>> {
 
   protected CONTEXT context;
@@ -44,7 +44,7 @@
   protected final Configuration configuration;
   protected OutputCollectable<KEYOUT, VALUEOUT> outputCollectable;
 
-  public AbstractMockContextWrapper(final Configuration configuration, 
+  public AbstractMockContextWrapper(final Configuration configuration,
       final MockOutputCreator<KEYOUT, VALUEOUT> mockOutputCreator) {
     this.mockOutputCreator = mockOutputCreator;
     this.configuration = configuration;
@@ -55,7 +55,7 @@
       final TaskInputOutputContext context,
       final ContextDriver contextDriver,
       final MockOutputCreator mockOutputCreator) {
-        
+
     when(context.getCounter((Enum) any())).thenAnswer(new Answer<Counter>() {
       @Override
       public Counter answer(final InvocationOnMock invocation) {
@@ -84,7 +84,7 @@
           final Object[] args = invocation.getArguments();
           try {
             if(outputCollectable == null) {
-              outputCollectable = mockOutputCreator.createMapReduceOutputCollectable(contextDriver.getConfiguration(), 
+              outputCollectable = mockOutputCreator.createMapReduceOutputCollectable(contextDriver.getConfiguration(),
                   contextDriver.getOutputSerializationConfiguration(), context);
             }
             outputCollectable.collect((KEYOUT)args[0], (VALUEOUT)args[1]);
@@ -94,42 +94,42 @@
           return null;
         }
       }).when(context).write(any(), any());
-      
-      
+
+
       when(context.getSymlink()).thenAnswer(new Answer<Boolean>() {
         @Override
         @SuppressWarnings("deprecation")
         public Boolean answer(InvocationOnMock invocation) throws Throwable {
           return DistributedCache.getSymlink(configuration);
-        }      
+        }
       });
       when(context.getCacheArchives()).thenAnswer(new Answer<URI[]>() {
         @Override
         @SuppressWarnings("deprecation")
         public URI[] answer(InvocationOnMock invocation) throws Throwable {
           return DistributedCache.getCacheArchives(configuration);
-        }      
+        }
       });
       when(context.getCacheFiles()).thenAnswer(new Answer<URI[]>() {
         @Override
         @SuppressWarnings("deprecation")
         public URI[] answer(InvocationOnMock invocation) throws Throwable {
           return DistributedCache.getCacheFiles(configuration);
-        }      
+        }
       });
       when(context.getLocalCacheArchives()).thenAnswer(new Answer<Path[]>() {
         @Override
         @SuppressWarnings("deprecation")
         public Path[] answer(InvocationOnMock invocation) throws Throwable {
           return DistributedCache.getLocalCacheArchives(configuration);
-        }      
+        }
       });
       when(context.getLocalCacheFiles()).thenAnswer(new Answer<Path[]>() {
         @Override
         @SuppressWarnings("deprecation")
         public Path[] answer(InvocationOnMock invocation) throws Throwable {
           return DistributedCache.getLocalCacheFiles(configuration);
-        }      
+        }
       });
 
       when(context.getNumReduceTasks()).thenAnswer(new Answer<Integer>() {
@@ -155,7 +155,7 @@
     }
     return outputCollectable.getOutputs();
   }
-  
+
   public CONTEXT getMockContext() {
     return context;
   }
diff --git a/src/main/java/org/apache/hadoop/mrunit/internal/mapreduce/MockMapContextWrapper.java b/src/main/java/org/apache/hadoop/mrunit/internal/mapreduce/MockMapContextWrapper.java
index 40112fa..d6a6b0f 100644
--- a/src/main/java/org/apache/hadoop/mrunit/internal/mapreduce/MockMapContextWrapper.java
+++ b/src/main/java/org/apache/hadoop/mrunit/internal/mapreduce/MockMapContextWrapper.java
@@ -43,7 +43,7 @@
  * it send the results back to us, etc. But since Mapper.Context is an inner
  * class of Mapper, we need to put any subclasses of Mapper.Context in a
  * subclass of Mapper.
- * 
+ *
  * This wrapper class exists for that purpose.
  */
 public class MockMapContextWrapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>
@@ -54,9 +54,9 @@
 
   protected final List<Pair<KEYIN, VALUEIN>> inputs;
   protected final MapDriver<KEYIN, VALUEIN, KEYOUT, VALUEOUT> driver;
-  
+
   protected Pair<KEYIN, VALUEIN> currentKeyValue;
-  
+
   public MockMapContextWrapper(
       final Configuration configuration,
       final List<Pair<KEYIN, VALUEIN>> inputs,
diff --git a/src/main/java/org/apache/hadoop/mrunit/internal/mapreduce/MockReduceContextWrapper.java b/src/main/java/org/apache/hadoop/mrunit/internal/mapreduce/MockReduceContextWrapper.java
index bc0d282..3ae98ae 100644
--- a/src/main/java/org/apache/hadoop/mrunit/internal/mapreduce/MockReduceContextWrapper.java
+++ b/src/main/java/org/apache/hadoop/mrunit/internal/mapreduce/MockReduceContextWrapper.java
@@ -42,7 +42,7 @@
  * it send the results back to us, etc. But since Reducer.Context is an inner
  * class of Reducer, we need to put any subclasses of Reducer.Context in a
  * subclass of Reducer.
- * 
+ *
  * This wrapper class exists for that purpose.
  */
 public class MockReduceContextWrapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>
diff --git a/src/main/java/org/apache/hadoop/mrunit/internal/output/MockOutputCreator.java b/src/main/java/org/apache/hadoop/mrunit/internal/output/MockOutputCreator.java
index 1ffb9c6..7322a5b 100644
--- a/src/main/java/org/apache/hadoop/mrunit/internal/output/MockOutputCreator.java
+++ b/src/main/java/org/apache/hadoop/mrunit/internal/output/MockOutputCreator.java
@@ -72,7 +72,7 @@
       Configuration configuration,
       Configuration outputCopyingOrInputFormatConfiguration) throws IOException {
     outputCopyingOrInputFormatConfiguration = outputCopyingOrInputFormatConfiguration == null ? configuration
-        : outputCopyingOrInputFormatConfiguration;    
+        : outputCopyingOrInputFormatConfiguration;
     if (mapredOutputFormatClass != null) {
       return new MockMapredOutputFormat<K, V>(new JobConf(configuration),
           mapredOutputFormatClass, mapredInputFormatClass, new JobConf(
diff --git a/src/main/java/org/apache/hadoop/mrunit/internal/util/DistCacheUtils.java b/src/main/java/org/apache/hadoop/mrunit/internal/util/DistCacheUtils.java
index f150573..f97c496 100644
--- a/src/main/java/org/apache/hadoop/mrunit/internal/util/DistCacheUtils.java
+++ b/src/main/java/org/apache/hadoop/mrunit/internal/util/DistCacheUtils.java
@@ -43,9 +43,9 @@
   }
 
   /**
-   * Attempt to create a URI from a string path. First tries to load as a 
+   * Attempt to create a URI from a string path. First tries to load as a
    * class resource, and failing that, loads as a File.
-   *  
+   *
    * @param path path to resource
    * @return the uri of the resource
    */
@@ -68,7 +68,7 @@
   }
 
   /**
-   * Creates a comma separated list from a list of Path objects. 
+   * Creates a comma separated list from a list of Path objects.
    * Method borrowed from Hadoop's TaskDistributedCacheManager
    */
   public static String stringifyPathList(List<Path> p){
@@ -85,12 +85,12 @@
 
   /**
    * Create a randomly named temporary directory
-   * 
+   *
    * @return the file handle of the directory
    * @throws IOException
    */
   public static File createTempDirectory() throws IOException {
-    File tmpDir = new File(System.getProperty("java.io.tmpdir"), 
+    File tmpDir = new File(System.getProperty("java.io.tmpdir"),
         "mrunit-" + UUID.randomUUID().toString());
     LOG.debug("Creating temp directory " + tmpDir);
     tmpDir.mkdirs();
@@ -100,7 +100,7 @@
   /**
    * Extract an archive to the temp directory.
    * Code borrowed from Hadoop's TrackerDistributedCacheManager
-   * 
+   *
    * @param cacheArchive the cache archive to extract
    * @param tmpDir root location of temp directory
    * @return the path to the extracted archive
diff --git a/src/main/java/org/apache/hadoop/mrunit/internal/util/Errors.java b/src/main/java/org/apache/hadoop/mrunit/internal/util/Errors.java
index b47cf54..1a47fc9 100644
--- a/src/main/java/org/apache/hadoop/mrunit/internal/util/Errors.java
+++ b/src/main/java/org/apache/hadoop/mrunit/internal/util/Errors.java
@@ -59,15 +59,30 @@
   }
 
   /**
+   * Not empty after first record.
+   */
+  public boolean isEmpty() {
+    return messages.isEmpty();
+  }
+
+  /**
    * Throw an validation exception if any message have been recorded before.
    */
   public void assertNone() {
-    if (!messages.isEmpty()) {
-      final StringBuilder buffer = new StringBuilder();
-      buffer.append(messages.size()).append(" Error(s): ");
-      StringUtils.formatValueList(messages, buffer);
-      fail(buffer.toString());
+    if (!isEmpty()) {
+      fail(toString());
     }
   }
 
+  /* (non-Javadoc)
+   * @see java.lang.Object#toString()
+   */
+  @Override
+  public String toString() {
+    final StringBuilder buffer = new StringBuilder();
+    buffer.append(messages.size()).append(" Error(s): ");
+    StringUtils.formatValueList(messages, buffer);
+    return buffer.toString();
+  }
+
 }
diff --git a/src/main/java/org/apache/hadoop/mrunit/internal/util/PairComparator.java b/src/main/java/org/apache/hadoop/mrunit/internal/util/PairComparator.java
deleted file mode 100644
index 099b8fc..0000000
--- a/src/main/java/org/apache/hadoop/mrunit/internal/util/PairComparator.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.mrunit.internal.util;
-
-import java.util.Comparator;
-
-import org.apache.hadoop.mrunit.types.Pair;
-
-public class PairComparator<K, V> implements Comparator<Pair<K,V>> {
-
-  private final Comparator<K> keyComparator;
-  private final Comparator<V> valueComparator;
-
-  public PairComparator(final Comparator<K> keyComparator,
-      final Comparator<V> valueComparator) {
-    this.keyComparator = keyComparator;
-    this.valueComparator = valueComparator;
-  }
-
-  @Override
-  public int compare(final Pair<K, V> o1, Pair<K, V> o2) {
-    int comparison;
-    if (keyComparator != null) {
-      comparison = keyComparator.compare(o1.getFirst(), o2.getFirst());
-    } else if (o1.getFirst().getClass() != o2.getFirst().getClass()) {
-      /* This case needs to be here in order to handle the type unsafety
-       * introduced by withInputFromString and withOutputFromString (which are
-       * currently marked as deprecated). Once these functions are removed,
-       * this case can also be removed.
-       */
-      return -1;
-    } else {
-      comparison = ((Comparable<K>) o1.getFirst()).compareTo(o2.getFirst());
-    }
-    if (comparison != 0) {
-      return comparison;
-    }
-    if (valueComparator != null) {
-      return this.valueComparator.compare(o1.getSecond(), o2.getSecond());
-    } else if (o1.getSecond().getClass() != o2.getSecond().getClass()) {
-      /* This case needs to be here in order to handle the type unsafety
-       * introduced by withInputFromString and withOutputFromString (which are
-       * currently marked as deprecated). Once these functions are removed,
-       * this case can also be removed.
-       */
-      return -1;
-    } else {
-      return ((Comparable<V>) o1.getSecond()).compareTo(o2.getSecond());
-    }
-  }
-}
diff --git a/src/main/java/org/apache/hadoop/mrunit/internal/util/PairEquality.java b/src/main/java/org/apache/hadoop/mrunit/internal/util/PairEquality.java
new file mode 100644
index 0000000..2457f3c
--- /dev/null
+++ b/src/main/java/org/apache/hadoop/mrunit/internal/util/PairEquality.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mrunit.internal.util;
+
+import java.util.Comparator;
+
+import org.apache.hadoop.mrunit.types.Pair;
+
+public class PairEquality<K, V> {
+
+  private final Comparator<K> keyComparator;
+  private final Comparator<V> valueComparator;
+
+  public PairEquality(final Comparator<K> keyComparator,
+      final Comparator<V> valueComparator) {
+    this.keyComparator = keyComparator;
+    this.valueComparator = valueComparator;
+  }
+
+  public boolean isTrueFor(final Pair<K, V> o1, final Pair<K, V> o2) {
+    return equalityOf(o1.getFirst(), o2.getFirst(), keyComparator)
+        && equalityOf(o1.getSecond(), o2.getSecond(), valueComparator);
+  }
+
+  private <T> boolean equalityOf(final T t1, final T t2, final Comparator<T> c) {
+    if (c != null) {
+      return c.compare(t1, t2) == 0;
+    }
+    if (t1 == null && t2 == null) {
+      return true;
+    }
+    if (t1 != null && t2 == null) {
+      return false;
+    }
+    if (t1 == null && t2 != null) {
+      return false;
+    }
+    return t1.equals(t2);
+  }
+}
diff --git a/src/main/java/org/apache/hadoop/mrunit/internal/util/StringUtils.java b/src/main/java/org/apache/hadoop/mrunit/internal/util/StringUtils.java
index ed76f28..3ac3e1b 100644
--- a/src/main/java/org/apache/hadoop/mrunit/internal/util/StringUtils.java
+++ b/src/main/java/org/apache/hadoop/mrunit/internal/util/StringUtils.java
@@ -24,10 +24,10 @@
 import org.apache.hadoop.mrunit.types.Pair;
 
 public class StringUtils {
-  
+
   /**
    * Split "key \t val" into Pair(Text(key), Text(val))
-   * 
+   *
    * @param tabSeparatedPair
    * @return
    */
@@ -42,7 +42,7 @@
 
   /**
    * Split "val,val,val,val..." into a List of Text(val) objects.
-   * 
+   *
    * @param commaDelimList
    *          A list of values separated by commas
    */
@@ -68,7 +68,7 @@
 
     return outList;
   }
-  
+
   /**
    * Transform a list with elements a and b into a string "(a,b)".
    */
diff --git a/src/main/java/org/apache/hadoop/mrunit/mapreduce/MapReduceDriver.java b/src/main/java/org/apache/hadoop/mrunit/mapreduce/MapReduceDriver.java
index 7ead0aa..4bf05fd 100644
--- a/src/main/java/org/apache/hadoop/mrunit/mapreduce/MapReduceDriver.java
+++ b/src/main/java/org/apache/hadoop/mrunit/mapreduce/MapReduceDriver.java
@@ -82,7 +82,7 @@
 
   /**
    * Set the Mapper instance to use with this test driver
-   * 
+   *
    * @param m
    *          the Mapper instance to use
    */
@@ -106,7 +106,7 @@
 
   /**
    * Sets the reducer object to use for this test
-   * 
+   *
    * @param r
    *          The reducer object to use
    */
@@ -116,7 +116,7 @@
 
   /**
    * Identical to setReducer(), but with fluent programming style
-   * 
+   *
    * @param r
    *          The Reducer to use
    * @return this
@@ -136,7 +136,7 @@
 
   /**
    * Sets the reducer object to use as a combiner for this test
-   * 
+   *
    * @param c
    *          The combiner object to use
    */
@@ -146,7 +146,7 @@
 
   /**
    * Identical to setCombiner(), but with fluent programming style
-   * 
+   *
    * @param c
    *          The Combiner to use
    * @return this
@@ -171,7 +171,7 @@
 
   /**
    * Sets the counters object to use for this test.
-   * 
+   *
    * @param ctrs
    *          The counters object to use.
    */
@@ -190,7 +190,7 @@
   /**
    * Configure {@link Reducer} to output with a real {@link OutputFormat}. Set
    * {@link InputFormat} to read output back in for use with run* methods
-   * 
+   *
    * @param outputFormatClass
    * @param inputFormatClass
    * @return this for fluent style
@@ -299,7 +299,7 @@
   /**
    * Returns a new MapReduceDriver without having to specify the generic types
    * on the right hand side of the object create statement.
-   * 
+   *
    * @return new MapReduceDriver
    */
   public static <K1, V1, K2, V2, K3, V3> MapReduceDriver<K1, V1, K2, V2, K3, V3> newMapReduceDriver() {
@@ -309,7 +309,7 @@
   /**
    * Returns a new MapReduceDriver without having to specify the generic types
    * on the right hand side of the object create statement.
-   * 
+   *
    * @param mapper
    *          passed to MapReduceDriver constructor
    * @param reducer
@@ -324,7 +324,7 @@
   /**
    * Returns a new MapReduceDriver without having to specify the generic types
    * on the right hand side of the object create statement.
-   * 
+   *
    * @param mapper
    *          passed to MapReduceDriver constructor
    * @param reducer
diff --git a/src/main/java/org/apache/hadoop/mrunit/testutil/ExtendedAssert.java b/src/main/java/org/apache/hadoop/mrunit/testutil/ExtendedAssert.java
index 05420cb..1aa8c47 100644
--- a/src/main/java/org/apache/hadoop/mrunit/testutil/ExtendedAssert.java
+++ b/src/main/java/org/apache/hadoop/mrunit/testutil/ExtendedAssert.java
@@ -33,7 +33,7 @@
 
   /**
    * Asserts that all the elements of the list are equivalent under equals()
-   * 
+   *
    * @param expected
    *          a list full of expected values
    * @param actual
@@ -65,7 +65,7 @@
 
   /**
    * asserts x &gt; y
-   * 
+   *
    * @deprecated No replacement since extension methods to JUnit assert methods
    *             dont fit well with MRUnit's goals
    */
@@ -76,7 +76,7 @@
 
   /**
    * asserts x &gt;= y)
-   * 
+   *
    * @deprecated No replacement since extension methods to JUnit assert methods
    *             dont fit well with MRUnit's goals
    */
@@ -87,7 +87,7 @@
 
   /**
    * asserts x &lt; y
-   * 
+   *
    * @deprecated No replacement since extension methods to JUnit assert methods
    *             dont fit well with MRUnit's goals
    */
@@ -98,7 +98,7 @@
 
   /**
    * asserts x &gt;= y)
-   * 
+   *
    * @deprecated No replacement since extension methods to JUnit assert methods
    *             dont fit well with MRUnit's goals
    * */
diff --git a/src/main/java/org/apache/hadoop/mrunit/testutil/TemporaryPath.java b/src/main/java/org/apache/hadoop/mrunit/testutil/TemporaryPath.java
index 2f915d0..c298252 100644
--- a/src/main/java/org/apache/hadoop/mrunit/testutil/TemporaryPath.java
+++ b/src/main/java/org/apache/hadoop/mrunit/testutil/TemporaryPath.java
@@ -38,29 +38,29 @@
  * destroys it afterwards. This works much like JUnit's {@link TemporaryFolder} but is
  * geared towards Hadoop applications. The temporary directory is deleted after each test
  * case, no matter if the test case succeeded or failed.
- * 
+ *
  * <p>This is how it works:</p>
  *
  * <pre><tt>
  * public class TestExample {
  *   {@literal @Rule}
  *   public TemporaryPath tmpDir = new TemporaryPath();
- *   
+ *
  *   {@literal @Test}
  *   public void testSomething() {
  *     Path input = tmpDir.copyResourcePath("my-test-data");
  *     Path output = tmpDir.getPath("output");
- *     
+ *
  *     // create and run a Hadoop job reading from input and writing to output
  *   }
  * }
  * </tt></pre>
- * 
+ *
  * <p>In some cases, the frameworks you use in your tests use temporary directories
  * internally. If those directories are configurable via Hadoop properties, you can
  * let {@link TemporaryPath#overridePathProperties(Configuration)} override them
  * so that they point to your managed temporary directory. You have to specify the
- * properties to override via the constructor.</p> 
+ * properties to override via the constructor.</p>
  */
 public final class TemporaryPath extends ExternalResource {
   private final TemporaryFolder tmp = new TemporaryFolder();
@@ -143,7 +143,7 @@
   public Path copyResourcePath(String resourceName) throws IOException {
     return toPath(copyResourceFile(resourceName));
   }
-  
+
 
   /**
    * Get a new {@link Configuration} instance.
diff --git a/src/main/java/org/apache/hadoop/mrunit/types/Pair.java b/src/main/java/org/apache/hadoop/mrunit/types/Pair.java
index 5ce462b..826a695 100644
--- a/src/main/java/org/apache/hadoop/mrunit/types/Pair.java
+++ b/src/main/java/org/apache/hadoop/mrunit/types/Pair.java
@@ -23,7 +23,7 @@
 
 /**
  * A very basic pair type that does not allow null values.
- * 
+ *
  * @param <S>
  * @param <T>
  */
diff --git a/src/test/java/org/apache/hadoop/mrunit/ExtendedAssert.java b/src/test/java/org/apache/hadoop/mrunit/ExtendedAssert.java
index 3adb020..83d4f4b 100644
--- a/src/test/java/org/apache/hadoop/mrunit/ExtendedAssert.java
+++ b/src/test/java/org/apache/hadoop/mrunit/ExtendedAssert.java
@@ -28,7 +28,7 @@
 
   /**
    * Asserts that all the elements of the list are equivalent under equals()
-   * 
+   *
    * @param expected
    *          a list full of expected values
    * @param actual
diff --git a/src/test/java/org/apache/hadoop/mrunit/TestDistributedCache.java b/src/test/java/org/apache/hadoop/mrunit/TestDistributedCache.java
index d4b61b7..daa38f7 100644
--- a/src/test/java/org/apache/hadoop/mrunit/TestDistributedCache.java
+++ b/src/test/java/org/apache/hadoop/mrunit/TestDistributedCache.java
@@ -48,11 +48,11 @@
   private Mapper<Text,Text,Text,Text> mapper = new TestDistributedCacheMapperAndReducer();
   private Reducer<Text,Text,Text,Text> reducer = new TestDistributedCacheMapperAndReducer();
 
-  private MapDriver<Text,Text,Text,Text> mapDriver = 
+  private MapDriver<Text,Text,Text,Text> mapDriver =
       MapDriver.newMapDriver(mapper);
-  private ReduceDriver<Text,Text,Text,Text> reduceDriver = 
+  private ReduceDriver<Text,Text,Text,Text> reduceDriver =
       ReduceDriver.newReduceDriver(reducer);
-  private MapReduceDriver<Text,Text,Text,Text,Text,Text> mapReduceDriver = 
+  private MapReduceDriver<Text,Text,Text,Text,Text,Text> mapReduceDriver =
       MapReduceDriver.newMapReduceDriver();
 
   /**
@@ -60,7 +60,7 @@
    * cache and outputs the filenames as keys, and whether the cache item is a file
    * or directory ("file" or "dir") as value
    */
-  private static class TestDistributedCacheMapperAndReducer extends MapReduceBase 
+  private static class TestDistributedCacheMapperAndReducer extends MapReduceBase
     implements Mapper<Text,Text,Text,Text>, Reducer<Text,Text,Text,Text> {
 
     private static final Text DIR = new Text("dir");
@@ -109,7 +109,7 @@
       }
     }
 
-    private void outputPath(String parentPath, Path path, 
+    private void outputPath(String parentPath, Path path,
         OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
       FileStatus fstat = fs.getFileStatus(path);
       boolean isDir = fstat.isDir();
@@ -184,7 +184,7 @@
     reduceDriver.withCacheFile("testfile")
       .withOutput(new Text("testfile"), new Text("file")).runTest(false);
   }
-  
+
   @Test
   public void testAddCacheFileToReducerUsingStaticMethod() throws Exception
   {
@@ -221,7 +221,7 @@
       .withOutput(new Text("testarchive.tar/d"), new Text("file"))
       .runTest(false);
   }
-  
+
   @Test
   public void testAddCacheArchiveToMapReduceUsingDriverMethod2() throws IOException
   {
diff --git a/src/test/java/org/apache/hadoop/mrunit/TestExample.java b/src/test/java/org/apache/hadoop/mrunit/TestExample.java
index e211995..a1118f8 100644
--- a/src/test/java/org/apache/hadoop/mrunit/TestExample.java
+++ b/src/test/java/org/apache/hadoop/mrunit/TestExample.java
@@ -28,7 +28,7 @@
 /**
  * Example test of the IdentityMapper to demonstrate proper MapDriver usage in a
  * test case.
- * 
+ *
  * This example is reproduced in the overview for the MRUnit javadoc.
  */
 public class TestExample {
diff --git a/src/test/java/org/apache/hadoop/mrunit/TestMapDriver.java b/src/test/java/org/apache/hadoop/mrunit/TestMapDriver.java
index 0952f0b..6b57a7e 100644
--- a/src/test/java/org/apache/hadoop/mrunit/TestMapDriver.java
+++ b/src/test/java/org/apache/hadoop/mrunit/TestMapDriver.java
@@ -40,7 +40,9 @@
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
+import org.apache.hadoop.mrunit.MapDriver;
 import org.apache.hadoop.mrunit.types.Pair;
+import org.apache.hadoop.mrunit.types.UncomparableWritable;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -59,7 +61,7 @@
     mapper = new IdentityMapper<Text, Text>();
     driver = MapDriver.newMapDriver(mapper);
   }
-  
+
   @Test
   public void testRun() throws IOException {
     final List<Pair<Text, Text>> out = driver.withInput(new Text("foo"),
@@ -72,6 +74,14 @@
   }
 
   @Test
+  public void testUncomparable() throws IOException {
+    Object k = new UncomparableWritable(1);
+    Object v = new UncomparableWritable(2);
+    MapDriver.newMapDriver(new IdentityMapper<Object, Object>())
+        .withInput(k, v).withOutput(k, v).runTest();
+  }
+
+  @Test
   public void testTestRun1() throws IOException {
     driver.withInput(new Text("foo"), new Text("bar"))
         .withOutput(new Text("foo"), new Text("bar")).runTest();
@@ -80,8 +90,7 @@
   @Test
   public void testTestRun2() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Expected no outputs; got 1 outputs., "
-            + "Received unexpected output (foo, bar) at position 0.)");
+        .expectAssertionErrorMessage("1 Error(s): (Expected no output; got 1 output(s).)");
     driver.withInput(new Text("foo"), new Text("bar")).runTest();
   }
 
@@ -124,8 +133,8 @@
   @Test
   public void testTestRun5() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (foo, somethingelse) at position 0., "
-            + "Received unexpected output (foo, bar) at position 0.)");
+        .expectAssertionErrorMessage("1 Error(s): (Missing expected output (foo, somethingelse)" +
+            " at position 0, got (foo, bar).)");
     driver.withInput(new Text("foo"), new Text("bar"))
         .withOutput(new Text("foo"), new Text("somethingelse")).runTest(true);
   }
@@ -157,8 +166,8 @@
   @Test
   public void testTestRun6() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (someotherkey, bar) at position 0., "
-            + "Received unexpected output (foo, bar) at position 0.)");
+        .expectAssertionErrorMessage("1 Error(s): (Missing expected output (someotherkey, bar)"
+            + " at position 0, got (foo, bar).)");
     driver.withInput(new Text("foo"), new Text("bar"))
         .withOutput(new Text("someotherkey"), new Text("bar")).runTest(true);
   }
@@ -172,6 +181,9 @@
         if (o2.toString().equals("foo") && o1.toString().equals("someotherkey")) {
             return 0;
         }
+        if (o2.toString().equals("someotherkey") && o1.toString().equals("foo")) {
+            return 0;
+        }
         return o1.compareTo(o2);
       }
     };
@@ -193,9 +205,8 @@
   @Test
   public void testTestRun7() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Matched expected output (foo, bar) but at "
-            + "incorrect position 0 (expected position 1), "
-            + "Missing expected output (someotherkey, bar) at position 0.)");
+        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (someotherkey, bar)"
+            + " at position 0, got (foo, bar)., Missing expected output (foo, bar) at position 1.)");
     driver.withInput(new Text("foo"), new Text("bar"))
         .withOutput(new Text("someotherkey"), new Text("bar"))
         .withOutput(new Text("foo"), new Text("bar")).runTest(true);
@@ -209,7 +220,7 @@
         .withOutput(new Text("someotherkey"), new Text("bar"))
         .withOutput(new Text("foo"), new Text("bar")).runTest(false);
   }
-  
+
   @Test
   public void testTestRun8OrderInsensitive() throws IOException {
     thrown
@@ -222,7 +233,7 @@
         .withOutput(new Text("foo"), new Text("bar"))
         .withOutput(new Text("foo"), new Text("bar")).runTest(false);
   }
-  
+
   @Test
   public void testAddAll() throws IOException {
     final List<Pair<Text, Text>> inputs = new ArrayList<Pair<Text, Text>>();
@@ -235,7 +246,7 @@
 
     driver.withAll(inputs).withAllOutput(outputs).runTest();
   }
-  
+
   @Test
   public void testUnexpectedOutput() throws IOException {
     thrown
@@ -244,7 +255,7 @@
         .withOutput(new Text("foo"),new Text("bar"))
         .runTest(true);
   }
-  
+
   @Test
   public void testUnexpectedOutputMultiple() throws IOException {
     thrown
@@ -255,7 +266,7 @@
         .withOutput(new Text("foo"),new Text("bar"))
         .runTest(true);
   }
-  
+
   @Test
   public void testUnexpectedOutputMultipleComparator() throws IOException {
     Comparator<Text> comparatorAlwaysEqual = new Comparator<Text>() {
@@ -288,7 +299,7 @@
         .withOutput(new Text("foo"),new Text("bar"))
         .runTest(false);
   }
-  
+
   @Test
   public void testUnexpectedOutputMultipleOrderInsensitive() throws IOException {
     thrown
@@ -400,7 +411,7 @@
   @Test
   public void testWithCounterAndEnumCounterMissing() throws IOException {
     MapDriver<Text, Text, Text, Text> driver = MapDriver.newMapDriver();
-    
+
     thrown
         .expectAssertionErrorMessage("1 Error(s): (Actual counter ("
             + "\"org.apache.hadoop.mrunit.TestMapDriver$MapperWithCounters$Counters\",\"X\")"
@@ -416,7 +427,7 @@
   @Test
   public void testWithCounterAndStringCounterMissing() throws IOException {
     MapDriver<Text, Text, Text, Text> driver = MapDriver.newMapDriver();
-    
+
     thrown
     .expectAssertionErrorMessage("1 Error(s): (Actual counter ("
         + "\"category\",\"name\")"
@@ -498,12 +509,9 @@
     driver.withInputFromString("a\tb");
     driver.withOutputFromString("1\ta");
     thrown.expectAssertionErrorMessage("2 Error(s)");
-    thrown.expectAssertionErrorMessage("Missing expected output (1, a): "
-        + "Mismatch in key class: expected: class org.apache.hadoop.io.Text "
-        + "actual: class org.apache.hadoop.io.LongWritable");
-    thrown.expectAssertionErrorMessage("Received unexpected output (1, a): "
-        + "Mismatch in key class: expected: class org.apache.hadoop.io.Text "
-        + "actual: class org.apache.hadoop.io.LongWritable");
+    thrown.expectAssertionErrorMessage("Missing expected output (1, a) at position 0, got (1, a).");
+    thrown.expectAssertionErrorMessage("Mismatch in key class: expected:"
+        + " class org.apache.hadoop.io.Text actual: class org.apache.hadoop.io.LongWritable");
     driver.runTest();
   }
 
@@ -526,7 +534,7 @@
     implements Mapper<Text, Text, Text, Text> {
     private int duplicationFactor = 2;
     public DuplicatingMapper() {
-     
+
     }
     public DuplicatingMapper(int factor) {
       duplicationFactor = factor;
@@ -546,10 +554,10 @@
     driver.withInputFromString("a\tb");
     driver.withOutputFromString("a\t1");
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (a, 1): Mismatch in value class: "
-            + "expected: class org.apache.hadoop.io.Text actual: class org.apache.hadoop.io.LongWritable, "
-            + "Received unexpected output (a, 1): Mismatch in value class: expected: class "
-            + "org.apache.hadoop.io.Text actual: class org.apache.hadoop.io.LongWritable)");
+        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (a, 1)"
+            + " at position 0, got (a, 1)., Mismatch in value class:"
+            + " expected: class org.apache.hadoop.io.Text"
+            + " actual: class org.apache.hadoop.io.LongWritable)");
     driver.runTest();
   }
 
diff --git a/src/test/java/org/apache/hadoop/mrunit/TestMapReduceDriver.java b/src/test/java/org/apache/hadoop/mrunit/TestMapReduceDriver.java
index 254e461..50da602 100644
--- a/src/test/java/org/apache/hadoop/mrunit/TestMapReduceDriver.java
+++ b/src/test/java/org/apache/hadoop/mrunit/TestMapReduceDriver.java
@@ -48,10 +48,13 @@
 import org.apache.hadoop.mapred.lib.LongSumReducer;
 import org.apache.hadoop.mrunit.types.Pair;
 import org.apache.hadoop.mrunit.types.TestWritable;
+import org.apache.hadoop.mrunit.types.UncomparableWritable;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 
+import com.google.common.collect.Lists;
+
 public class TestMapReduceDriver {
 
   private static final int FOO_IN_A = 42;
@@ -94,6 +97,16 @@
   }
 
   @Test
+  public void testUncomparable() throws IOException {
+    Text k = new Text("test");
+    Object v = new UncomparableWritable(2);
+    MapReduceDriver.newMapReduceDriver(
+        new IdentityMapper<Text, Object>(),
+        new IdentityReducer<Text, Object>())
+        .withInput(k, v).withOutput(k, v).runTest();
+  }
+
+  @Test
   public void testTestRun1() throws IOException {
     driver.withInput(new Text("foo"), new LongWritable(FOO_IN_A))
         .withInput(new Text("foo"), new LongWritable(FOO_IN_B))
@@ -114,10 +127,8 @@
   @Test
   public void testTestRun3() throws IOException {
     thrown.expectAssertionErrorMessage("2 Error(s)");
-    thrown.expectAssertionErrorMessage("Matched expected output (foo, 52) but "
-        + "at incorrect position 1 (expected position 0)");
-    thrown.expectAssertionErrorMessage("Matched expected output (bar, 12) but "
-        + "at incorrect position 0 (expected position 1)");
+    thrown.expectAssertionErrorMessage("Missing expected output (foo, 52) at position 0, got (bar, 12).");
+    thrown.expectAssertionErrorMessage("Missing expected output (bar, 12) at position 1, got (foo, 52).");
     driver.withInput(new Text("foo"), new LongWritable(FOO_IN_A))
         .withInput(new Text("bar"), new LongWritable(BAR_IN))
         .withInput(new Text("foo"), new LongWritable(FOO_IN_B))
@@ -138,7 +149,7 @@
 
     driver.withAll(inputs).withAllOutput(outputs).runTest();
   }
-  
+
   @Test
   public void testTestRun3OrderInsensitive() throws IOException {
     driver.withInput(new Text("foo"), new LongWritable(FOO_IN_A))
@@ -419,7 +430,7 @@
         .withCounter("category", "count", 1).withCounter("category", "sum", 1)
         .runTest();
   }
-  
+
   @Test
   public void testWithCounterAndNoneMissing() throws IOException {
     MapReduceDriver<Text, Text, Text, Text, Text, Text> driver = MapReduceDriver
@@ -581,7 +592,7 @@
 
   @Test
   public void testMapInputFile() throws IOException {
-    InputPathStoringMapper<LongWritable,LongWritable> mapper = 
+    InputPathStoringMapper<LongWritable,LongWritable> mapper =
         new InputPathStoringMapper<LongWritable,LongWritable>();
     Path mapInputPath = new Path("myfile");
     driver = MapReduceDriver.newMapReduceDriver(mapper, reducer);
@@ -609,10 +620,10 @@
 
   @Test
   public void testGroupingComparatorBehaviour2() throws IOException {
-    // this test fails pre-MRUNIT-127 because of the incorrect 
-    // grouping of reduce keys in "shuffle". 
+    // this test fails pre-MRUNIT-127 because of the incorrect
+    // grouping of reduce keys in "shuffle".
     // MapReduce doesn't group keys which aren't in a contiguous
-    // range when sorted by their sorting comparator. 
+    // range when sorted by their sorting comparator.
     driver.withInput(new Text("1A"),new LongWritable(1L))
       .withInput(new Text("2A"),new LongWritable(1L))
       .withInput(new Text("1B"),new LongWritable(1L))
diff --git a/src/test/java/org/apache/hadoop/mrunit/TestPipelineMapReduceDriver.java b/src/test/java/org/apache/hadoop/mrunit/TestPipelineMapReduceDriver.java
index 398e584..f566f77 100644
--- a/src/test/java/org/apache/hadoop/mrunit/TestPipelineMapReduceDriver.java
+++ b/src/test/java/org/apache/hadoop/mrunit/TestPipelineMapReduceDriver.java
@@ -35,6 +35,7 @@
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.mapred.lib.LongSumReducer;
 import org.apache.hadoop.mrunit.types.Pair;
+import org.apache.hadoop.mrunit.types.UncomparableWritable;
 import org.junit.Rule;
 import org.junit.Test;
 
@@ -60,6 +61,16 @@
   }
 
   @Test
+  public void testUncomparable() throws IOException {
+    Text k = new Text("test");
+    Object v = new UncomparableWritable(2);
+    PipelineMapReduceDriver.newPipelineMapReduceDriver().withMapReduce(
+        new IdentityMapper<Text, Object>(),
+        new IdentityReducer<Text, Object>())
+        .withInput(k, v).withOutput(k, v).runTest();
+  }
+
+  @Test
   public void testNoInput() throws IOException {
     thrown.expectMessage(IllegalStateException.class, "No input was provided");
     final PipelineMapReduceDriver<Text, Text, Text, Text> driver = PipelineMapReduceDriver.newPipelineMapReduceDriver();
@@ -111,13 +122,13 @@
 
   @Test
   public void testSumAtEndWithInputAddAll() throws IOException {
-    
+
     final List<Pair<Text, LongWritable>> inputs = new ArrayList<Pair<Text, LongWritable>>();
     inputs.add(new Pair<Text, LongWritable>(new Text("foo"), new LongWritable(FOO_IN_A)));
     inputs.add(new Pair<Text, LongWritable>(new Text("bar"), new LongWritable(BAR_IN)));
     inputs.add(new Pair<Text, LongWritable>(new Text("foo"), new LongWritable(FOO_IN_B)));
-    
-    final PipelineMapReduceDriver<Text, LongWritable, Text, LongWritable> driver 
+
+    final PipelineMapReduceDriver<Text, LongWritable, Text, LongWritable> driver
       = PipelineMapReduceDriver.newPipelineMapReduceDriver();
     driver
         .withMapReduce(new IdentityMapper<Text, LongWritable>(),
@@ -192,7 +203,7 @@
         .withCounter("category", "count", 2).withCounter("category", "sum", 2)
         .runTest();
   }
-  
+
   @Test
   public void testWithCounterAndNoneMissing() throws IOException {
     final PipelineMapReduceDriver<Text, Text, Text, Text> driver = PipelineMapReduceDriver.newPipelineMapReduceDriver();
@@ -331,10 +342,10 @@
     driver.addOutput(key, value);
     driver.runTest();
   }
-  
+
   @Test
   public void testMapInputFile() throws IOException {
-    InputPathStoringMapper<LongWritable,LongWritable> mapper = 
+    InputPathStoringMapper<LongWritable,LongWritable> mapper =
         new InputPathStoringMapper<LongWritable,LongWritable>();
     Path mapInputPath = new Path("myfile");
     final PipelineMapReduceDriver<Text, LongWritable, Text, LongWritable> driver = PipelineMapReduceDriver
diff --git a/src/test/java/org/apache/hadoop/mrunit/TestReduceDriver.java b/src/test/java/org/apache/hadoop/mrunit/TestReduceDriver.java
index e0c3be6..9b0feec 100644
--- a/src/test/java/org/apache/hadoop/mrunit/TestReduceDriver.java
+++ b/src/test/java/org/apache/hadoop/mrunit/TestReduceDriver.java
@@ -40,11 +40,15 @@
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.mapred.lib.LongSumReducer;
+import org.apache.hadoop.mrunit.ReduceDriver;
 import org.apache.hadoop.mrunit.types.Pair;
+import org.apache.hadoop.mrunit.types.UncomparableWritable;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 
+import com.google.common.collect.Lists;
+
 @SuppressWarnings("deprecation")
 public class TestReduceDriver {
 
@@ -77,8 +81,16 @@
     assertListEquals(out, expected);
 
   }
-  
- 
+
+  @Test
+  public void testUncomparable() throws IOException {
+    Object k = new UncomparableWritable(1);
+    Object v = new UncomparableWritable(2);
+    ReduceDriver.newReduceDriver(new IdentityReducer<Object, Object>())
+        .withInput(k, Lists.newArrayList(v)).withOutput(k, v).runTest();
+  }
+
+
 
   @Test
   public void testTestRun1() throws IOException {
@@ -90,8 +102,7 @@
   @Test
   public void testTestRun2() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (bar, 10) at position 0., "
-            + "Received unexpected output (foo, 10) at position 0.)");
+        .expectAssertionErrorMessage("1 Error(s): (Missing expected output (bar, 10) at position 0, got (foo, 10).)");
     driver.withInputKey(new Text("foo")).withInputValue(new LongWritable(IN_A))
         .withInputValue(new LongWritable(IN_B))
         .withOutput(new Text("bar"), new LongWritable(OUT_VAL)).runTest(true);
@@ -110,8 +121,8 @@
   @Test
   public void testTestRun3() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (foo, 12) at position 0., "
-            + "Received unexpected output (foo, 10) at position 0.)");
+        .expectAssertionErrorMessage("1 Error(s): (Missing expected output (foo, 12)"
+            + " at position 0, got (foo, 10).)");
     driver.withInputKey(new Text("foo")).withInputValue(new LongWritable(IN_A))
         .withInputValue(new LongWritable(IN_B))
         .withOutput(new Text("foo"), new LongWritable(INCORRECT_OUT))
@@ -161,9 +172,8 @@
       }
     };
     driver.setValueComparator(toleranceComparator);
-    thrown.expectAssertionErrorMessage("2 Error(s)");
-    thrown.expectAssertionErrorMessage("Missing expected output (foo, 12)");
-    thrown.expectAssertionErrorMessage("Received unexpected output (foo, 10)");
+    thrown.expectAssertionErrorMessage("1 Error(s)");
+    thrown.expectAssertionErrorMessage("Missing expected output (foo, 12) at position 0, got (foo, 10).");
     driver.withInputKey(new Text("foo")).withInputValue(new LongWritable(IN_A))
         .withInputValue(new LongWritable(IN_B))
         .withOutput(new Text("foo"), new LongWritable(INCORRECT_OUT))
@@ -173,8 +183,8 @@
   @Test
   public void testTestRun4() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (foo, 4) at position 0., "
-            + "Received unexpected output (foo, 10) at position 0.)");
+        .expectAssertionErrorMessage("1 Error(s): (Missing expected output (foo, 4)"
+            + " at position 0, got (foo, 10).)");
     driver.withInputKey(new Text("foo")).withInputValue(new LongWritable(IN_A))
         .withInputValue(new LongWritable(IN_B))
         .withOutput(new Text("foo"), new LongWritable(IN_A)).runTest(true);
@@ -192,10 +202,9 @@
 
   @Test
   public void testTestRun5() throws IOException {
-    thrown.expectAssertionErrorMessage("3 Error(s)");
-    thrown.expectAssertionErrorMessage("Missing expected output (foo, 4) at position 0.");
+    thrown.expectAssertionErrorMessage("2 Error(s)");
+    thrown.expectAssertionErrorMessage("Missing expected output (foo, 4) at position 0, got (foo, 10).");
     thrown.expectAssertionErrorMessage("Missing expected output (foo, 6) at position 1.");
-    thrown.expectAssertionErrorMessage("Received unexpected output (foo, 10) at position 0.");
     driver.withInputKey(new Text("foo")).withInputValue(new LongWritable(IN_A))
         .withInputValue(new LongWritable(IN_B))
         .withOutput(new Text("foo"), new LongWritable(IN_A))
@@ -237,9 +246,8 @@
   @Test
   public void testTestRun7() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (bar, 10) at position 0., "
-            + "Matched expected output (foo, 10) but at "
-            + "incorrect position 0 (expected position 1))");
+        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (bar, 10)"
+            + " at position 0, got (foo, 10)., Missing expected output (foo, 10) at position 1.)");
     driver.withInputKey(new Text("foo")).withInputValue(new LongWritable(IN_A))
         .withInputValue(new LongWritable(IN_B))
         .withOutput(new Text("bar"), new LongWritable(OUT_VAL))
@@ -470,7 +478,7 @@
         .withCounter("category", "count", 2).withCounter("category", "sum", 2)
         .runTest();
   }
-  
+
   @Test
   public void testWithCounterAndNoneMissingMultipleInput() throws IOException {
     final ReduceDriver<Text, Text, Text, Text> driver = ReduceDriver
@@ -539,7 +547,7 @@
         .withCounter(ReducerWithCounters.Counters.SUM, 2)
         .withCounter("category", "sum", 2).runTest();
   }
-  
+
   @Test
   public void testWithFailedEnumCounter() throws IOException {
     final ReduceDriver<Text, Text, Text, Text> driver = ReduceDriver
diff --git a/src/test/java/org/apache/hadoop/mrunit/internal/util/TestStringUtils.java b/src/test/java/org/apache/hadoop/mrunit/internal/util/TestStringUtils.java
index 070a743..8ad6e73 100644
--- a/src/test/java/org/apache/hadoop/mrunit/internal/util/TestStringUtils.java
+++ b/src/test/java/org/apache/hadoop/mrunit/internal/util/TestStringUtils.java
@@ -216,7 +216,7 @@
     StringUtils.formatValueList(Arrays.asList("single"), stringBuilder);
     assertEquals("previous message (single)", stringBuilder.toString());
   }
-  
+
   @Test
   public void shouldFormatValueListWithManyElement() {
     StringBuilder stringBuilder = new StringBuilder();
diff --git a/src/test/java/org/apache/hadoop/mrunit/mapreduce/StatefulMapper.java b/src/test/java/org/apache/hadoop/mrunit/mapreduce/StatefulMapper.java
index 1a968ef..866aeea 100644
--- a/src/test/java/org/apache/hadoop/mrunit/mapreduce/StatefulMapper.java
+++ b/src/test/java/org/apache/hadoop/mrunit/mapreduce/StatefulMapper.java
@@ -34,7 +34,7 @@
 
   /**
    * Increment someState for each input.
-   * 
+   *
    * @param context
    *          the Hadoop job Map context
    * @throws java.io.IOException
@@ -48,7 +48,7 @@
   /**
    * Runs once after all maps have occurred. Dumps the accumulated state to the
    * output.
-   * 
+   *
    * @param context
    *          the Hadoop job Map context
    */
diff --git a/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestDistributedCache.java b/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestDistributedCache.java
index fb5360e..4817f66 100644
--- a/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestDistributedCache.java
+++ b/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestDistributedCache.java
@@ -42,27 +42,27 @@
   private Mapper<Text,Text,Text,Text> mapper = new TestDistributedCacheMapper();
   private Reducer<Text,Text,Text,Text> reducer = new TestDistributedCacheReducer();
 
-  private MapDriver<Text,Text,Text,Text> mapDriver = 
+  private MapDriver<Text,Text,Text,Text> mapDriver =
       MapDriver.newMapDriver(mapper);
-  private ReduceDriver<Text,Text,Text,Text> reduceDriver = 
+  private ReduceDriver<Text,Text,Text,Text> reduceDriver =
       ReduceDriver.newReduceDriver(reducer);
-  private MapReduceDriver<Text,Text,Text,Text,Text,Text> mapReduceDriver = 
+  private MapReduceDriver<Text,Text,Text,Text,Text,Text> mapReduceDriver =
       MapReduceDriver.newMapReduceDriver();
 
   static final Text DIR = new Text("dir");
   static final Text FILE = new Text("file");
-  
+
   /**
    * A mapper class which loads files / archives from distributed
    * cache and outputs the filenames as keys, and whether the cache item is a file
    * or directory ("file" or "dir") as value
    */
-  private static class TestDistributedCacheMapper  
+  private static class TestDistributedCacheMapper
     extends Mapper<Text,Text,Text,Text> {
 
     private List<Path> cachePaths;
 
-    protected void setup(Context context) 
+    protected void setup(Context context)
         throws IOException, InterruptedException {
       cachePaths = TestDistributedCacheUtils.createCachePathList(context);
     }
@@ -79,16 +79,16 @@
    * cache and outputs the filenames as keys, and whether the cache item is a file
    * or directory ("file" or "dir") as value
    */
-  private static class TestDistributedCacheReducer  
+  private static class TestDistributedCacheReducer
     extends Reducer<Text,Text,Text,Text> {
-  
+
     private List<Path> cachePaths;
-  
-    protected void setup(Context context) 
+
+    protected void setup(Context context)
         throws IOException, InterruptedException {
       cachePaths = TestDistributedCacheUtils.createCachePathList(context);
     }
-  
+
     @Override
     public void reduce(Text key, Iterable<Text> value, Context context)
         throws IOException, InterruptedException {
@@ -113,16 +113,16 @@
       return cachePaths;
     }
 
-    private static void outputCachePaths(List<Path> cachePaths, 
-        TaskInputOutputContext<Text,Text,Text,Text> context) 
+    private static void outputCachePaths(List<Path> cachePaths,
+        TaskInputOutputContext<Text,Text,Text,Text> context)
         throws IOException, InterruptedException {
       for (Path path: cachePaths) {
         outputPath("", path, context);
       }
     }
 
-    private static void outputPath(String parentPath, Path path, 
-        TaskInputOutputContext<Text,Text,Text,Text> context) 
+    private static void outputPath(String parentPath, Path path,
+        TaskInputOutputContext<Text,Text,Text,Text> context)
             throws IOException, InterruptedException {
       FileSystem fs = FileSystem.get(context.getConfiguration());
       FileStatus fstat = fs.getFileStatus(path);
@@ -199,7 +199,7 @@
     reduceDriver.withCacheFile("testfile")
       .withOutput(new Text("testfile"), new Text("file")).runTest(false);
   }
-  
+
   @Test
   public void testAddCacheFileToReducerUsingStaticMethod() throws Exception
   {
@@ -236,7 +236,7 @@
       .withOutput(new Text("testarchive.tar/d"), new Text("file"))
       .runTest(false);
   }
-  
+
   @Test
   public void testAddCacheArchiveToMapReduceUsingDriverMethod2() throws IOException
   {
diff --git a/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestMapDriver.java b/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestMapDriver.java
index 9236e5c..119614c 100644
--- a/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestMapDriver.java
+++ b/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestMapDriver.java
@@ -46,6 +46,7 @@
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
 import org.apache.hadoop.mrunit.ExpectedSuppliedException;
 import org.apache.hadoop.mrunit.types.Pair;
+import org.apache.hadoop.mrunit.types.UncomparableWritable;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -77,6 +78,14 @@
   }
 
   @Test
+  public void testUncomparable() throws IOException {
+    Object k = new UncomparableWritable(1);
+    Object v = new UncomparableWritable(2);
+    MapDriver.newMapDriver(new Mapper<Object, Object, Object, Object>())
+        .withInput(k, v).withOutput(k, v).runTest();
+  }
+
+  @Test
   public void testTestRun1() throws IOException {
     driver.withInput(new Text("foo"), new Text("bar"))
         .withOutput(new Text("foo"), new Text("bar")).runTest();
@@ -85,8 +94,7 @@
   @Test
   public void testTestRun2() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Expected no outputs; got 1 outputs., "
-            + "Received unexpected output (foo, bar) at position 0.)");
+        .expectAssertionErrorMessage("1 Error(s): (Expected no output; got 1 output(s).)");
     driver.withInput(new Text("foo"), new Text("bar")).runTest();
   }
 
@@ -130,8 +138,8 @@
   @Test
   public void testTestRun5() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (foo, somethingelse) at position 0.," 
-            + " Received unexpected output (foo, bar) at position 0.)");
+        .expectAssertionErrorMessage("1 Error(s): (Missing expected output (foo, somethingelse)"
+            + " at position 0, got (foo, bar).)");
     driver.withInput(new Text("foo"), new Text("bar"))
         .withOutput(new Text("foo"), new Text("somethingelse")).runTest(true);
   }
@@ -148,8 +156,8 @@
   @Test
   public void testTestRun6() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (someotherkey, bar) at position 0.,"
-            + " Received unexpected output (foo, bar) at position 0.)");
+        .expectAssertionErrorMessage("1 Error(s): (Missing expected output (someotherkey, bar)"
+            + " at position 0, got (foo, bar).)");
     driver.withInput(new Text("foo"), new Text("bar"))
         .withOutput(new Text("someotherkey"), new Text("bar")).runTest(true);
   }
@@ -166,8 +174,8 @@
   @Test
   public void testTestRun7() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Matched expected output (foo, bar) but at "
-            + "incorrect position 0 (expected position 1), Missing expected output (someotherkey, bar) at position 0.)");
+        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (someotherkey, bar)"
+            + " at position 0, got (foo, bar)., Missing expected output (foo, bar) at position 1.)");
     driver.withInput(new Text("foo"), new Text("bar"))
         .withOutput(new Text("someotherkey"), new Text("bar"))
         .withOutput(new Text("foo"), new Text("bar")).runTest(true);
@@ -194,7 +202,7 @@
 
     driver.withAll(inputs).withAllOutput(outputs).runTest();
   }
-  
+
   @Test
   public void testSetInput() {
     driver.setInput(new Pair<Text, Text>(new Text("foo"), new Text("bar")));
@@ -226,7 +234,7 @@
         .withCounter(MapperWithCounters.Counters.X, 1)
         .withCounter("category", "name", 1).runTest();
   }
-  
+
   @Test
   public void testWithCounterAndNoneMissing() throws IOException {
     MapDriver<Text, Text, Text, Text> driver = MapDriver.newMapDriver();
@@ -242,7 +250,7 @@
   @Test
   public void testWithCounterAndEnumCounterMissing() throws IOException {
     MapDriver<Text, Text, Text, Text> driver = MapDriver.newMapDriver();
-    
+
     thrown
         .expectAssertionErrorMessage("1 Error(s): (Actual counter ("
             + "\"org.apache.hadoop.mrunit.mapreduce.TestMapDriver$MapperWithCounters$Counters\",\"X\")"
@@ -258,7 +266,7 @@
   @Test
   public void testWithCounterAndStringCounterMissing() throws IOException {
     MapDriver<Text, Text, Text, Text> driver = MapDriver.newMapDriver();
-    
+
     thrown
     .expectAssertionErrorMessage("1 Error(s): (Actual counter ("
         + "\"category\",\"name\")"
@@ -316,19 +324,19 @@
 
   @Test
   public void testInputSplitDetails() throws IOException {
-    final MapDriver<NullWritable, NullWritable, Text, LongWritable> driver = 
+    final MapDriver<NullWritable, NullWritable, Text, LongWritable> driver =
         MapDriver.newMapDriver(new InputSplitDetailMapper());
     driver.withInput(NullWritable.get(), NullWritable.get())
       .withOutput(new Text("somefile"), new LongWritable(0L)).runTest();
   }
-  
+
   public static class InputSplitDetailMapper
     extends Mapper<NullWritable, NullWritable, Text, LongWritable> {
     @Override
-    protected void map(NullWritable key, NullWritable value, Context context) 
+    protected void map(NullWritable key, NullWritable value, Context context)
         throws IOException, InterruptedException {
       FileSplit split = (FileSplit)context.getInputSplit();
-      context.write(new Text(split.getPath().toString()), 
+      context.write(new Text(split.getPath().toString()),
           new LongWritable(split.getLength()));
     }
   }
@@ -394,9 +402,9 @@
         public void close(TaskAttemptContext context) throws IOException,
             InterruptedException {
           writer.close(context);
-        }        
+        }
       };
-    }    
+    }
   }
   @Test
   public void testCountingOutputFormat() throws IOException {
@@ -426,7 +434,7 @@
     driver.withOutput(new LongWritable(), new Text("a\t1"));
     driver.runTest();
   }
-  
+
 
 
   @Test
@@ -441,7 +449,7 @@
     assertNotNull(mapper.getMapInputPath());
     assertEquals(mapInputPath.getName(), mapper.getMapInputPath().getName());
   }
-  
+
   @Test
   public void textMockContext() throws IOException, InterruptedException {
     thrown.expectMessage(RuntimeException.class, "Injected!");
@@ -453,10 +461,10 @@
     driver.withOutput(new Text("a"), new Text("1"));
     driver.runTest();
   }
-  
+
   static class TaskAttemptMapper extends Mapper<Text,NullWritable,Text,NullWritable> {
     @Override
-    protected void map(Text key, NullWritable value, Context context) 
+    protected void map(Text key, NullWritable value, Context context)
         throws IOException,InterruptedException {
       context.write(new Text(context.getTaskAttemptID().toString()), NullWritable.get());
     }
@@ -464,7 +472,7 @@
 
   @Test
   public void testWithTaskAttemptUse() throws IOException {
-    final MapDriver<Text,NullWritable,Text,NullWritable> driver 
+    final MapDriver<Text,NullWritable,Text,NullWritable> driver
       = MapDriver.newMapDriver(new TaskAttemptMapper());
     driver.withInput(new Text("anything"), NullWritable.get()).withOutput(
         new Text("attempt__0000_m_000000_0"), NullWritable.get()).runTest();
@@ -483,5 +491,5 @@
     thrown.expectMessage(IllegalStateException.class, "Driver reuse not allowed");
     driver.withAll(inputs).withAllOutput(outputs).runTest();
   }
-  
+
 }
diff --git a/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestMapReduceDriver.java b/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestMapReduceDriver.java
index f5b7d36..0f6a521 100644
--- a/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestMapReduceDriver.java
+++ b/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestMapReduceDriver.java
@@ -31,6 +31,8 @@
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.serializer.JavaSerializationComparator;
+import org.apache.hadoop.mapred.lib.IdentityMapper;
+import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
@@ -41,6 +43,7 @@
 import org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer;
 import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
 import org.apache.hadoop.mrunit.ExpectedSuppliedException;
+import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;
 import org.apache.hadoop.mrunit.TestMapReduceDriver.FirstCharComparator;
 import org.apache.hadoop.mrunit.TestMapReduceDriver.SecondCharComparator;
 import org.apache.hadoop.mrunit.mapreduce.TestMapDriver.ConfigurationMapper;
@@ -48,6 +51,7 @@
 import org.apache.hadoop.mrunit.types.KeyValueReuseList;
 import org.apache.hadoop.mrunit.types.Pair;
 import org.apache.hadoop.mrunit.types.TestWritable;
+import org.apache.hadoop.mrunit.types.UncomparableWritable;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -96,6 +100,16 @@
   }
 
   @Test
+  public void testUncomparable() throws IOException {
+    Text k = new Text("test");
+    Object v = new UncomparableWritable(2);
+    MapReduceDriver.newMapReduceDriver(
+        new Mapper<Text, Object,Text, Object>(),
+        new Reducer<Text, Object,Text, Object>())
+        .withInput(k, v).withOutput(k, v).runTest();
+  }
+
+  @Test
   public void testTestRun1() throws IOException {
     driver.withInput(new Text("foo"), new LongWritable(FOO_IN_A))
         .withInput(new Text("foo"), new LongWritable(FOO_IN_B))
@@ -116,10 +130,8 @@
   @Test
   public void testTestRun3() throws IOException {
     thrown.expectAssertionErrorMessage("2 Error(s)");
-    thrown.expectAssertionErrorMessage("Matched expected output (foo, 52) but "
-        + "at incorrect position 1 (expected position 0)");
-    thrown.expectAssertionErrorMessage("Matched expected output (bar, 12) but "
-        + "at incorrect position 0 (expected position 1)");
+    thrown.expectAssertionErrorMessage("Missing expected output (foo, 52) at position 0, got (bar, 12).");
+    thrown.expectAssertionErrorMessage("Missing expected output (bar, 12) at position 1, got (foo, 52).");
     driver.withInput(new Text("foo"), new LongWritable(FOO_IN_A))
         .withInput(new Text("bar"), new LongWritable(BAR_IN))
         .withInput(new Text("foo"), new LongWritable(FOO_IN_B))
@@ -140,7 +152,7 @@
 
     driver.withAll(inputs).withAllOutput(outputs).runTest();
   }
-  
+
   @Test
   public void testTestRun3OrderInsensitive() throws IOException {
     driver.withInput(new Text("foo"), new LongWritable(FOO_IN_A))
@@ -463,7 +475,7 @@
         .withCounter("category", "count", 1).withCounter("category", "sum", 1)
         .runTest();
   }
-  
+
   @Test
   public void testWithCounterAndNoneMissing() throws IOException {
     MapReduceDriver<Text, Text, Text, Text, Text, Text> driver = MapReduceDriver
@@ -616,10 +628,10 @@
 
   @Test
   public void testGroupingComparatorBehaviour2() throws IOException {
-    // this test fails pre-MRUNIT-127 because of the incorrect 
-    // grouping of reduce keys in "shuffle". 
+    // this test fails pre-MRUNIT-127 because of the incorrect
+    // grouping of reduce keys in "shuffle".
     // MapReduce doesn't group keys which aren't in a contiguous
-    // range when sorted by their sorting comparator. 
+    // range when sorted by their sorting comparator.
     driver.withInput(new Text("1A"),new LongWritable(1L))
       .withInput(new Text("2A"),new LongWritable(1L))
       .withInput(new Text("1B"),new LongWritable(1L))
@@ -636,12 +648,12 @@
 
   @Test
   public void testUseOfWritableRegisteredComparator() throws IOException {
-    
+
     // this test should use the comparator registered inside TestWritable
     // to output the keys in reverse order
-    MapReduceDriver<TestWritable,Text,TestWritable,Text,TestWritable,Text> driver 
+    MapReduceDriver<TestWritable,Text,TestWritable,Text,TestWritable,Text> driver
       = MapReduceDriver.newMapReduceDriver(new Mapper(), new Reducer());
-    
+
     driver.withInput(new TestWritable("A1"), new Text("A1"))
       .withInput(new TestWritable("A2"), new Text("A2"))
       .withInput(new TestWritable("A3"), new Text("A3"))
diff --git a/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestReduceDriver.java b/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestReduceDriver.java
index 4045eb5..c7dd4eb 100644
--- a/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestReduceDriver.java
+++ b/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestReduceDriver.java
@@ -18,10 +18,10 @@
 
 package org.apache.hadoop.mrunit.mapreduce;
 
-import static org.apache.hadoop.mrunit.ExtendedAssert.*;
-import static org.junit.Assert.*;
-import static org.mockito.Matchers.*;
-import static org.mockito.Mockito.*;
+import static org.apache.hadoop.mrunit.ExtendedAssert.assertListEquals;
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.doThrow;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -42,11 +42,15 @@
 import org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer;
 import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
 import org.apache.hadoop.mrunit.ExpectedSuppliedException;
+import org.apache.hadoop.mrunit.mapreduce.TestWordCount.Reduce;
 import org.apache.hadoop.mrunit.types.Pair;
+import org.apache.hadoop.mrunit.types.UncomparableWritable;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 
+import com.google.common.collect.Lists;
+
 public class TestReduceDriver {
 
   private static final int IN_A = 4;
@@ -82,6 +86,14 @@
   }
 
   @Test
+  public void testUncomparable() throws IOException {
+    Object k = new UncomparableWritable(1);
+    Object v = new UncomparableWritable(2);
+    ReduceDriver.newReduceDriver(new Reducer<Object, Object, Object, Object>())
+        .withInput(k, Lists.newArrayList(v)).withOutput(k, v).runTest();
+  }
+
+  @Test
   public void testTestRun1() throws IOException {
     driver.withInputKey(new Text("foo")).withInputValue(new LongWritable(IN_A))
         .withInputValue(new LongWritable(IN_B))
@@ -91,8 +103,7 @@
   @Test
   public void testTestRun2() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (bar, 10) at position 0., "
-            + "Received unexpected output (foo, 10) at position 0.)");
+        .expectAssertionErrorMessage("1 Error(s): (Missing expected output (bar, 10) at position 0, got (foo, 10).)");
     driver.withInputKey(new Text("foo")).withInputValue(new LongWritable(IN_A))
         .withInputValue(new LongWritable(IN_B))
         .withOutput(new Text("bar"), new LongWritable(OUT_VAL)).runTest(true);
@@ -111,8 +122,7 @@
   @Test
   public void testTestRun3() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (foo, 12) at position 0., "
-            + "Received unexpected output (foo, 10) at position 0.)");
+        .expectAssertionErrorMessage("1 Error(s): (Missing expected output (foo, 12) at position 0, got (foo, 10).)");
     driver.withInputKey(new Text("foo")).withInputValue(new LongWritable(IN_A))
         .withInputValue(new LongWritable(IN_B))
         .withOutput(new Text("foo"), new LongWritable(INCORRECT_OUT))
@@ -133,8 +143,7 @@
   @Test
   public void testTestRun4() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (foo, 4) at position 0., "
-            + "Received unexpected output (foo, 10) at position 0.)");
+        .expectAssertionErrorMessage("1 Error(s): (Missing expected output (foo, 4) at position 0, got (foo, 10).)");
     driver.withInputKey(new Text("foo")).withInputValue(new LongWritable(IN_A))
         .withInputValue(new LongWritable(IN_B))
         .withOutput(new Text("foo"), new LongWritable(IN_A)).runTest(true);
@@ -152,10 +161,9 @@
 
   @Test
   public void testTestRun5() throws IOException {
-    thrown.expectAssertionErrorMessage("3 Error(s)");
+    thrown.expectAssertionErrorMessage("2 Error(s)");
+    thrown.expectAssertionErrorMessage("Missing expected output (foo, 4) at position 0, got (foo, 10).");
     thrown.expectAssertionErrorMessage("Missing expected output (foo, 6) at position 1.");
-    thrown.expectAssertionErrorMessage("Missing expected output (foo, 4) at position 0.");
-    thrown.expectAssertionErrorMessage("Received unexpected output (foo, 10) at position 0.");
     driver.withInputKey(new Text("foo")).withInputValue(new LongWritable(IN_A))
         .withInputValue(new LongWritable(IN_B))
         .withOutput(new Text("foo"), new LongWritable(IN_A))
@@ -197,8 +205,8 @@
   @Test
   public void testTestRun7() throws IOException {
     thrown
-        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (bar, 10) at position 0., "
-            + "Matched expected output (foo, 10) but at incorrect position 0 (expected position 1))");
+        .expectAssertionErrorMessage("2 Error(s): (Missing expected output (bar, 10)" +
+        		" at position 0, got (foo, 10)., Missing expected output (foo, 10) at position 1.)");
     driver.withInputKey(new Text("foo")).withInputValue(new LongWritable(IN_A))
         .withInputValue(new LongWritable(IN_B))
         .withOutput(new Text("bar"), new LongWritable(OUT_VAL))
@@ -355,7 +363,7 @@
         .withCounter("category", "count", 1).withCounter("category", "sum", 2)
         .runTest();
   }
-  
+
   @Test
   public void testWithCounterAndNoneMissing() throws IOException {
     final ReduceDriver<Text, Text, Text, Text> driver = ReduceDriver
@@ -490,7 +498,7 @@
     driver.withInputKey(1).withInputValue(new IntWritable(2))
         .withOutput(1, new IntWritable(2)).runTest();
   }
-  
+
   @Test
   public void testOutputFormat() throws IOException {
     driver.withOutputFormat(SequenceFileOutputFormat.class,
@@ -513,7 +521,7 @@
     driver.withOutput(new LongWritable(), new Text("a\t3"));
     driver.runTest();
   }
-  
+
   @Test
   public void textMockContext() throws IOException, InterruptedException {
     thrown.expectMessage(RuntimeException.class, "Injected!");
@@ -529,7 +537,7 @@
 
   static class TaskAttemptReducer extends Reducer<Text,NullWritable,Text,NullWritable> {
     @Override
-    protected void reduce(Text key, Iterable<NullWritable> values, Context context) 
+    protected void reduce(Text key, Iterable<NullWritable> values, Context context)
         throws IOException,InterruptedException {
       context.write(new Text(context.getTaskAttemptID().toString()), NullWritable.get());
     }
@@ -537,7 +545,7 @@
 
   @Test
   public void testWithTaskAttemptUse() throws IOException {
-    final ReduceDriver<Text,NullWritable,Text,NullWritable> driver 
+    final ReduceDriver<Text,NullWritable,Text,NullWritable> driver
       = ReduceDriver.newReduceDriver(new TaskAttemptReducer());
     ReduceFeeder<Text, NullWritable> reduceFeeder = new ReduceFeeder<Text, NullWritable>(driver.getConfiguration());
 
diff --git a/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestStatefulMapReduce.java b/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestStatefulMapReduce.java
index ad356b8..6a01324 100644
--- a/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestStatefulMapReduce.java
+++ b/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestStatefulMapReduce.java
@@ -33,10 +33,10 @@
 
   @Test
   public void testClosedFormMapReduce() throws IOException {
-    
-    MapReduceDriver<LongWritable, Text, Text, IntWritable, Text, IntWritable> mapReduceDriver 
+
+    MapReduceDriver<LongWritable, Text, Text, IntWritable, Text, IntWritable> mapReduceDriver
       = MapReduceDriver.newMapReduceDriver(new StatefulMapper(), new Reducer());
-    
+
     mapReduceDriver.addInput(new LongWritable(1L), new Text("hello"));
     mapReduceDriver.addInput(new LongWritable(2L), new Text("schmo"));
     mapReduceDriver.withOutput(new Text("SomeKey"), new IntWritable(2));
diff --git a/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestWordCount.java b/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestWordCount.java
index 177c0e6..8cbbd1d 100644
--- a/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestWordCount.java
+++ b/src/test/java/org/apache/hadoop/mrunit/mapreduce/TestWordCount.java
@@ -36,14 +36,14 @@
   private static final String FILE02 = "Hello Hadoop Goodbye Hadoop";
   private static final int ONE = 1;
   private static final int TWO = 2;
-  
+
   private Mapper<LongWritable, Text, Text, IntWritable> mapper;
   private Reducer<Text, IntWritable, Text, IntWritable> reducer;
   private MapReduceDriver<LongWritable, Text, Text, IntWritable, Text, IntWritable> driver;
   private MapDriver<LongWritable, Text, Text, IntWritable> mapDriver;
   private ReduceDriver<Text, IntWritable, Text, IntWritable> reduceDriver;
   private List<Pair<Text, IntWritable>> expectedOutput;
-  
+
   @Before
   public void setup() {
     mapper = new Map();
@@ -51,7 +51,7 @@
     driver = MapReduceDriver.newMapReduceDriver(mapper, reducer);
     mapDriver = MapDriver.newMapDriver(mapper);
     reduceDriver = ReduceDriver.newReduceDriver(reducer);
-    
+
     expectedOutput = new ArrayList<Pair<Text, IntWritable>>();
     expectedOutput.add(new Pair<Text, IntWritable>(new Text("Bye"), new IntWritable(ONE)));
     expectedOutput.add(new Pair<Text, IntWritable>(new Text("Goodbye"), new IntWritable(ONE)));
@@ -83,21 +83,21 @@
   public void TestReduceDriver() throws IOException {
     final List<IntWritable> input1 = new ArrayList<IntWritable>();
     input1.add(new IntWritable(ONE));
-    
+
     final List<IntWritable> input2 = new ArrayList<IntWritable>();
     input2.add(new IntWritable(ONE));
     input2.add(new IntWritable(ONE));
-    
+
     final List<Pair<Text, List<IntWritable>>> inputs = new ArrayList<Pair<Text, List<IntWritable>>>();
     inputs.add(new Pair<Text, List<IntWritable>>(new Text("Bye"), input1));
     inputs.add(new Pair<Text, List<IntWritable>>(new Text("Goodbye"), input1));
     inputs.add(new Pair<Text, List<IntWritable>>(new Text("Hadoop"), input2));
-    inputs.add(new Pair<Text, List<IntWritable>>(new Text("Hello"), input2));    
+    inputs.add(new Pair<Text, List<IntWritable>>(new Text("Hello"), input2));
     inputs.add(new Pair<Text, List<IntWritable>>(new Text("World"), input2));
-    
+
     reduceDriver.withAll(inputs).withAllOutput(expectedOutput).runTest(true);
   }
-  
+
   @Test
   public void TestRun() throws IOException {
     final List<Pair<LongWritable, Text>> inputs = new ArrayList<Pair<LongWritable, Text>>();
diff --git a/src/test/java/org/apache/hadoop/mrunit/testutil/TestTemporaryPath.java b/src/test/java/org/apache/hadoop/mrunit/testutil/TestTemporaryPath.java
index 77bde65..ef69cd2 100644
--- a/src/test/java/org/apache/hadoop/mrunit/testutil/TestTemporaryPath.java
+++ b/src/test/java/org/apache/hadoop/mrunit/testutil/TestTemporaryPath.java
@@ -79,7 +79,7 @@
     tmpDir.copyResourceFile("testdir/data.txt");
     assertThat(dest.exists(), is(true));
   }
-  
+
   @Test
   public void testGetDefaultConfiguration() {
     Configuration conf = tmpDir.getDefaultConfiguration();
diff --git a/src/test/java/org/apache/hadoop/mrunit/types/UncomparableWritable.java b/src/test/java/org/apache/hadoop/mrunit/types/UncomparableWritable.java
new file mode 100644
index 0000000..ad7d51d
--- /dev/null
+++ b/src/test/java/org/apache/hadoop/mrunit/types/UncomparableWritable.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mrunit.types;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.io.Writable;
+
+/**
+ * Almost the simplest not comparable writable. Used for testing
+ * that implementing {@link Comparable} is indeed not required.
+ */
+public class UncomparableWritable implements Writable {
+
+  private int value;
+
+  public UncomparableWritable() {
+  }
+
+  public UncomparableWritable(int value) {
+    this.value = value;
+  }
+
+  @Override
+  public void readFields(DataInput input) throws IOException {
+    value = input.readInt();
+  }
+
+  @Override
+  public void write(DataOutput output) throws IOException {
+    output.writeInt(value);
+  }
+
+  @Override
+  public int hashCode() {
+    return value;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return obj instanceof UncomparableWritable &&
+        ((UncomparableWritable)obj).value == this.value;
+  }
+
+  @Override
+  public String toString() {
+    return super.toString() + "(value="+value+")";
+  };
+
+}
diff --git a/src/test/resources/log4j.properties b/src/test/resources/log4j.properties
new file mode 100644
index 0000000..4c15f1b
--- /dev/null
+++ b/src/test/resources/log4j.properties
@@ -0,0 +1,6 @@
+log4j.rootLogger=INFO, stdout
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
\ No newline at end of file