AVRO-1828: Add EditorConfig file and cleanup of whitespace violations
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..4efdf7d
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+insert_final_newline = true
+
+[*.{java,xml,sh}]
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+
+#[*.scala]
+#indent_style = space
+#indent_size = 2
+
+#[*.py]
+#indent_style = space
+#indent_size = 4
diff --git a/CHANGES.txt b/CHANGES.txt
index 3a8f4e8..a8ed79f 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -31,6 +31,8 @@
 
     AVRO-1835: Running tests using JDK 1.8 complains about MaxPermSize (nielsbasjes)
 
+    AVRO-1828: Add EditorConfig file and cleanup of whitespace violations (nielsbasjes)
+
   BUG FIXES
 
     AVRO-1493. Java: Avoid the "Turkish Locale Problem". Schema fingerprints are
diff --git a/build.sh b/build.sh
index 37b3017..a373c2c 100755
--- a/build.sh
+++ b/build.sh
@@ -15,9 +15,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-set -e						  # exit on error
+set -e                # exit on error
 
-cd `dirname "$0"`				  # connect to root
+cd `dirname "$0"`     # connect to root
 
 VERSION=`cat share/VERSION.txt`
 
@@ -31,202 +31,199 @@
   usage
 fi
 
-set -x						  # echo commands
+set -x                # echo commands
 
 for target in "$@"
 do
-
-case "$target" in
+  case "$target" in
 
     test)
-	# run lang-specific tests
-        (cd lang/java; mvn test)
-	(cd lang/py; ant test)
-	(cd lang/py3; python3 setup.py test)
-	(cd lang/c; ./build.sh test)
-	(cd lang/c++; ./build.sh test)
-	(cd lang/csharp; ./build.sh test)
-	(cd lang/js; ./build.sh test)
-	(cd lang/ruby; ./build.sh test)
-	(cd lang/php; ./build.sh test)
-	(cd lang/perl; perl ./Makefile.PL && make test)
+      # run lang-specific tests
+      (cd lang/java; mvn test)
+      (cd lang/py; ant test)
+      (cd lang/py3; python3 setup.py test)
+      (cd lang/c; ./build.sh test)
+      (cd lang/c++; ./build.sh test)
+      (cd lang/csharp; ./build.sh test)
+      (cd lang/js; ./build.sh test)
+      (cd lang/ruby; ./build.sh test)
+      (cd lang/php; ./build.sh test)
+      (cd lang/perl; perl ./Makefile.PL && make test)
 
-	# create interop test data
-        mkdir -p build/interop/data
-	(cd lang/java/avro; mvn -P interop-data-generate generate-resources)
-	(cd lang/py; ant interop-data-generate)
-	(cd lang/c; ./build.sh interop-data-generate)
-	#(cd lang/c++; make interop-data-generate)
-	(cd lang/ruby; rake generate_interop)
-	(cd lang/php; ./build.sh interop-data-generate)
+      # create interop test data
+      mkdir -p build/interop/data
+      (cd lang/java/avro; mvn -P interop-data-generate generate-resources)
+      (cd lang/py; ant interop-data-generate)
+      (cd lang/c; ./build.sh interop-data-generate)
+      #(cd lang/c++; make interop-data-generate)
+      (cd lang/ruby; rake generate_interop)
+      (cd lang/php; ./build.sh interop-data-generate)
 
-	# run interop data tests
-	(cd lang/java; mvn test -P interop-data-test)
-	(cd lang/py; ant interop-data-test)
-	(cd lang/c; ./build.sh interop-data-test)
-	#(cd lang/c++; make interop-data-test)
-	(cd lang/ruby; rake interop)
-	(cd lang/php; ./build.sh test-interop)
+      # run interop data tests
+      (cd lang/java; mvn test -P interop-data-test)
+      (cd lang/py; ant interop-data-test)
+      (cd lang/c; ./build.sh interop-data-test)
+      #(cd lang/c++; make interop-data-test)
+      (cd lang/ruby; rake interop)
+      (cd lang/php; ./build.sh test-interop)
 
-	# java needs to package the jars for the interop rpc tests
-        (cd lang/java; mvn package -DskipTests)
-	# run interop rpc test
-        /bin/bash share/test/interop/bin/test_rpc_interop.sh
-
-	;;
+      # java needs to package the jars for the interop rpc tests
+      (cd lang/java; mvn package -DskipTests)
+      # run interop rpc test
+      /bin/bash share/test/interop/bin/test_rpc_interop.sh
+      ;;
 
     dist)
-        # ensure version matches
-        # FIXME: enforcer is broken:MENFORCER-42
-        # mvn enforcer:enforce -Davro.version=$VERSION
-        
-	# build source tarball
-        mkdir -p build
+      # ensure version matches
+      # FIXME: enforcer is broken:MENFORCER-42
+      # mvn enforcer:enforce -Davro.version=$VERSION
 
-        SRC_DIR=avro-src-$VERSION
-        DOC_DIR=avro-doc-$VERSION
+      # build source tarball
+      mkdir -p build
 
-	rm -rf build/${SRC_DIR}
-	if [ -d .svn ];
-	then
-		svn export --force . build/${SRC_DIR}
-	elif [ -d .git ];
-	then
-		mkdir -p build/${SRC_DIR}
-		git archive HEAD | tar -x -C build/${SRC_DIR}
-	else
-		echo "Not SVN and not GIT .. cannot continue"
-		exit -1;
-	fi
+      SRC_DIR=avro-src-$VERSION
+      DOC_DIR=avro-doc-$VERSION
 
-	#runs RAT on artifacts
-        mvn -N -P rat antrun:run
+      rm -rf build/${SRC_DIR}
+      if [ -d .svn ];
+      then
+        svn export --force . build/${SRC_DIR}
+      elif [ -d .git ];
+      then
+        mkdir -p build/${SRC_DIR}
+        git archive HEAD | tar -x -C build/${SRC_DIR}
+      else
+        echo "Not SVN and not GIT .. cannot continue"
+        exit -1;
+      fi
 
-	mkdir -p dist
-        (cd build; tar czf ../dist/${SRC_DIR}.tar.gz ${SRC_DIR})
+      #runs RAT on artifacts
+      mvn -N -P rat antrun:run
 
-	# build lang-specific artifacts
-        
-	(cd lang/java; mvn package -DskipTests -Dhadoop.version=1;
-	  rm -rf mapred/target/{classes,test-classes}/;
-	  rm -rf trevni/avro/target/{classes,test-classes}/;
-	  mvn -P dist package -DskipTests -Davro.version=$VERSION javadoc:aggregate)
-        (cd lang/java/trevni/doc; mvn site)
-        (mvn -N -P copy-artifacts antrun:run) 
+      mkdir -p dist
+      (cd build; tar czf ../dist/${SRC_DIR}.tar.gz ${SRC_DIR})
 
-	(cd lang/py; ant dist)
-	(cd lang/py3; python3 setup.py sdist; cp -r dist ../../dist/py3)
+      # build lang-specific artifacts
 
-	(cd lang/c; ./build.sh dist)
+      (cd lang/java; mvn package -DskipTests -Dhadoop.version=1;
+      rm -rf mapred/target/{classes,test-classes}/;
+      rm -rf trevni/avro/target/{classes,test-classes}/;
+      mvn -P dist package -DskipTests -Davro.version=$VERSION javadoc:aggregate)
+      (cd lang/java/trevni/doc; mvn site)
+      (mvn -N -P copy-artifacts antrun:run)
 
-	(cd lang/c++; ./build.sh dist)
+      (cd lang/py; ant dist)
+      (cd lang/py3; python3 setup.py sdist; cp -r dist ../../dist/py3)
 
-	(cd lang/csharp; ./build.sh dist)
+      (cd lang/c; ./build.sh dist)
 
-	(cd lang/js; ./build.sh dist)
+      (cd lang/c++; ./build.sh dist)
 
-	(cd lang/ruby; ./build.sh dist)
+      (cd lang/csharp; ./build.sh dist)
 
-	(cd lang/php; ./build.sh dist)
+      (cd lang/js; ./build.sh dist)
 
-        mkdir -p dist/perl
-	(cd lang/perl; perl ./Makefile.PL && make dist)
-        cp lang/perl/Avro-$VERSION.tar.gz dist/perl/
+      (cd lang/ruby; ./build.sh dist)
 
-	# build docs
-	(cd doc; ant)
-        # add LICENSE and NOTICE for docs
-        mkdir -p build/$DOC_DIR
-        cp doc/LICENSE build/$DOC_DIR
-        cp doc/NOTICE build/$DOC_DIR
-	(cd build; tar czf ../dist/avro-doc-$VERSION.tar.gz $DOC_DIR)
+      (cd lang/php; ./build.sh dist)
 
-	cp DIST_README.txt dist/README.txt
-	;;
+      mkdir -p dist/perl
+      (cd lang/perl; perl ./Makefile.PL && make dist)
+      cp lang/perl/Avro-$VERSION.tar.gz dist/perl/
+
+      # build docs
+      (cd doc; ant)
+      # add LICENSE and NOTICE for docs
+      mkdir -p build/$DOC_DIR
+      cp doc/LICENSE build/$DOC_DIR
+      cp doc/NOTICE build/$DOC_DIR
+      (cd build; tar czf ../dist/avro-doc-$VERSION.tar.gz $DOC_DIR)
+
+      cp DIST_README.txt dist/README.txt
+      ;;
 
     sign)
+      set +x
 
-	set +x
+      echo -n "Enter password: "
+      stty -echo
+      read password
+      stty echo
 
-	echo -n "Enter password: "
-	stty -echo
-	read password
-	stty echo
+      for f in $(find dist -type f \
+        \! -name '*.md5' \! -name '*.sha1' \
+        \! -name '*.asc' \! -name '*.txt' );
+      do
+        (cd `dirname $f`; md5sum `basename $f`) > $f.md5
+        (cd `dirname $f`; sha1sum `basename $f`) > $f.sha1
+        gpg --passphrase $password --armor --output $f.asc --detach-sig $f
+      done
 
-	for f in $(find dist -type f \
-	    \! -name '*.md5' \! -name '*.sha1' \
-	    \! -name '*.asc' \! -name '*.txt' );
-	do
-	    (cd `dirname $f`; md5sum `basename $f`) > $f.md5
-	    (cd `dirname $f`; sha1sum `basename $f`) > $f.sha1
-	    gpg --passphrase $password --armor --output $f.asc --detach-sig $f
-	done
-
-	set -x
-	;;
+      set -x
+      ;;
 
     clean)
-	rm -rf build dist
-	(cd doc; ant clean)
+      rm -rf build dist
+      (cd doc; ant clean)
 
-        (mvn clean)         
+      (mvn clean)
 
-	(cd lang/py; ant clean)
-	(cd lang/py3; python3 setup.py clean)
+      (cd lang/py; ant clean)
+      (cd lang/py3; python3 setup.py clean)
 
-	(cd lang/c; ./build.sh clean)
+      (cd lang/c; ./build.sh clean)
 
-	(cd lang/c++; ./build.sh clean)
+      (cd lang/c++; ./build.sh clean)
 
-	(cd lang/csharp; ./build.sh clean)
+      (cd lang/csharp; ./build.sh clean)
 
-	(cd lang/js; ./build.sh clean)
+      (cd lang/js; ./build.sh clean)
 
-	(cd lang/ruby; ./build.sh clean)
+      (cd lang/ruby; ./build.sh clean)
 
-	(cd lang/php; ./build.sh clean)
+      (cd lang/php; ./build.sh clean)
 
-	(cd lang/perl; [ ! -f Makefile ] || make clean)
-	;;
+      (cd lang/perl; [ ! -f Makefile ] || make clean)
+      ;;
 
     docker)
-        docker build -t avro-build share/docker
-        if [ "$(uname -s)" == "Linux" ]; then
-          USER_NAME=${SUDO_USER:=$USER}
-          USER_ID=$(id -u $USER_NAME)
-          GROUP_ID=$(id -g $USER_NAME)
-        else # boot2docker uid and gid
-          USER_NAME=$USER
-          USER_ID=1000
-          GROUP_ID=50
-        fi
-        docker build -t avro-build-${USER_NAME} - <<UserSpecificDocker
+      docker build -t avro-build share/docker
+      if [ "$(uname -s)" == "Linux" ]; then
+        USER_NAME=${SUDO_USER:=$USER}
+        USER_ID=$(id -u $USER_NAME)
+        GROUP_ID=$(id -g $USER_NAME)
+      else # boot2docker uid and gid
+        USER_NAME=$USER
+        USER_ID=1000
+        GROUP_ID=50
+      fi
+      docker build -t avro-build-${USER_NAME} - <<UserSpecificDocker
 FROM avro-build
 RUN groupadd -g ${GROUP_ID} ${USER_NAME} || true
 RUN useradd -g ${GROUP_ID} -u ${USER_ID} -k /root -m ${USER_NAME}
 ENV HOME /home/${USER_NAME}
 UserSpecificDocker
-        # By mapping the .m2 directory you can do an mvn install from
-        # within the container and use the result on your normal
-        # system.  And this also is a significant speedup in subsequent
-        # builds because the dependencies are downloaded only once.
-        docker run --rm=true -t -i \
-          -v ${PWD}:/home/${USER_NAME}/avro \
-          -w /home/${USER_NAME}/avro \
-          -v ${HOME}/.m2:/home/${USER_NAME}/.m2 \
-          -v ${HOME}/.gnupg:/home/${USER_NAME}/.gnupg \
-          -u ${USER_NAME} \
-          avro-build-${USER_NAME}
-        ;;
+      # By mapping the .m2 directory you can do an mvn install from
+      # within the container and use the result on your normal
+      # system.  And this also is a significant speedup in subsequent
+      # builds because the dependencies are downloaded only once.
+      docker run --rm=true -t -i \
+        -v ${PWD}:/home/${USER_NAME}/avro \
+        -w /home/${USER_NAME}/avro \
+        -v ${HOME}/.m2:/home/${USER_NAME}/.m2 \
+        -v ${HOME}/.gnupg:/home/${USER_NAME}/.gnupg \
+        -u ${USER_NAME} \
+        avro-build-${USER_NAME}
+      ;;
 
     rat)
-        mvn test -Dmaven.main.skip=true -Dmaven.test.skip=true -DskipTests=true -P rat -pl :avro-toplevel
-        ;;
+      mvn test -Dmaven.main.skip=true -Dmaven.test.skip=true -DskipTests=true -P rat -pl :avro-toplevel
+      ;;
 
     *)
-        usage
-        ;;
-esac
+      usage
+      ;;
+  esac
 
 done
 
diff --git a/doc/build.xml b/doc/build.xml
index fe21c96..1e7c058 100644
--- a/doc/build.xml
+++ b/doc/build.xml
@@ -18,10 +18,10 @@
   - under the License.
   -->
 <project name="doc" default="doc" basedir=".">
-  
+
   <!-- Load user's default properties. -->
   <property file="${user.home}/build.properties" />
-  
+
   <loadresource property="version">
     <file file="${basedir}/../share/VERSION.txt"/>
   </loadresource>
diff --git a/doc/examples/java-example/src/main/java/example/GenericMain.java b/doc/examples/java-example/src/main/java/example/GenericMain.java
index 87a445c..2d51a8d 100644
--- a/doc/examples/java-example/src/main/java/example/GenericMain.java
+++ b/doc/examples/java-example/src/main/java/example/GenericMain.java
@@ -33,39 +33,39 @@
 import org.apache.avro.io.DatumWriter;
 
 public class GenericMain {
-	public static void main(String[] args) throws IOException {
-		Schema schema = new Parser().parse(new File("/home/skye/code/cloudera/avro/doc/examples/user.avsc"));
-		
-		GenericRecord user1 = new GenericData.Record(schema);
-		user1.put("name", "Alyssa");
-		user1.put("favorite_number", 256);
-		// Leave favorite color null
-		
-		GenericRecord user2 = new GenericData.Record(schema);
-		user2.put("name", "Ben");
-		user2.put("favorite_number", 7);
-		user2.put("favorite_color", "red");
-		
-		// Serialize user1 and user2 to disk
-		File file = new File("users.avro");
-		DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(schema);
-		DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<GenericRecord>(datumWriter);
-		dataFileWriter.create(schema, file);
-		dataFileWriter.append(user1);
-		dataFileWriter.append(user2);
-		dataFileWriter.close();
+  public static void main(String[] args) throws IOException {
+    Schema schema = new Parser().parse(new File("/home/skye/code/cloudera/avro/doc/examples/user.avsc"));
 
-		// Deserialize users from disk
-		DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>(schema);
-		DataFileReader<GenericRecord> dataFileReader = new DataFileReader<GenericRecord>(file, datumReader);
-		GenericRecord user = null;
-		while (dataFileReader.hasNext()) {
-			// Reuse user object by passing it to next(). This saves us from
-			// allocating and garbage collecting many objects for files with
-			// many items.
-			user = dataFileReader.next(user);
-			System.out.println(user);
-		}
-		
-	}
+    GenericRecord user1 = new GenericData.Record(schema);
+    user1.put("name", "Alyssa");
+    user1.put("favorite_number", 256);
+    // Leave favorite color null
+
+    GenericRecord user2 = new GenericData.Record(schema);
+    user2.put("name", "Ben");
+    user2.put("favorite_number", 7);
+    user2.put("favorite_color", "red");
+
+    // Serialize user1 and user2 to disk
+    File file = new File("users.avro");
+    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(schema);
+    DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<GenericRecord>(datumWriter);
+    dataFileWriter.create(schema, file);
+    dataFileWriter.append(user1);
+    dataFileWriter.append(user2);
+    dataFileWriter.close();
+
+    // Deserialize users from disk
+    DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>(schema);
+    DataFileReader<GenericRecord> dataFileReader = new DataFileReader<GenericRecord>(file, datumReader);
+    GenericRecord user = null;
+    while (dataFileReader.hasNext()) {
+      // Reuse user object by passing it to next(). This saves us from
+      // allocating and garbage collecting many objects for files with
+      // many items.
+      user = dataFileReader.next(user);
+      System.out.println(user);
+    }
+
+  }
 }
diff --git a/doc/examples/java-example/src/main/java/example/SpecificMain.java b/doc/examples/java-example/src/main/java/example/SpecificMain.java
index 2b5fd0f..e068eed 100644
--- a/doc/examples/java-example/src/main/java/example/SpecificMain.java
+++ b/doc/examples/java-example/src/main/java/example/SpecificMain.java
@@ -31,43 +31,43 @@
 import example.avro.User;
 
 public class SpecificMain {
-	public static void main(String[] args) throws IOException {
-		User user1 = new User();
-		user1.setName("Alyssa");
-		user1.setFavoriteNumber(256);
-		// Leave favorite color null
+  public static void main(String[] args) throws IOException {
+    User user1 = new User();
+    user1.setName("Alyssa");
+    user1.setFavoriteNumber(256);
+    // Leave favorite color null
 
-		// Alternate constructor
-		User user2 = new User("Ben", 7, "red");
-		
-		// Construct via builder
-		User user3 = User.newBuilder()
-				     .setName("Charlie")
-				     .setFavoriteColor("blue")
-				     .setFavoriteNumber(null)
-				     .build();
+    // Alternate constructor
+    User user2 = new User("Ben", 7, "red");
 
-		// Serialize user1 and user2 to disk
-		File file = new File("users.avro");
-		DatumWriter<User> userDatumWriter = new SpecificDatumWriter<User>(User.class);
-		DataFileWriter<User> dataFileWriter = new DataFileWriter<User>(userDatumWriter);
-		dataFileWriter.create(user1.getSchema(), file);
-		dataFileWriter.append(user1);
-		dataFileWriter.append(user2);
-		dataFileWriter.append(user3);
-		dataFileWriter.close();
+    // Construct via builder
+    User user3 = User.newBuilder()
+             .setName("Charlie")
+             .setFavoriteColor("blue")
+             .setFavoriteNumber(null)
+             .build();
 
-		// Deserialize Users from disk
-		DatumReader<User> userDatumReader = new SpecificDatumReader<User>(User.class);
-		DataFileReader<User> dataFileReader = new DataFileReader<User>(file, userDatumReader);
-		User user = null;
-		while (dataFileReader.hasNext()) {
-			// Reuse user object by passing it to next(). This saves us from
-			// allocating and garbage collecting many objects for files with
-			// many items.
-			user = dataFileReader.next(user);
-			System.out.println(user);
-		}
+    // Serialize user1 and user2 to disk
+    File file = new File("users.avro");
+    DatumWriter<User> userDatumWriter = new SpecificDatumWriter<User>(User.class);
+    DataFileWriter<User> dataFileWriter = new DataFileWriter<User>(userDatumWriter);
+    dataFileWriter.create(user1.getSchema(), file);
+    dataFileWriter.append(user1);
+    dataFileWriter.append(user2);
+    dataFileWriter.append(user3);
+    dataFileWriter.close();
 
-	}
+    // Deserialize Users from disk
+    DatumReader<User> userDatumReader = new SpecificDatumReader<User>(User.class);
+    DataFileReader<User> dataFileReader = new DataFileReader<User>(file, userDatumReader);
+    User user = null;
+    while (dataFileReader.hasNext()) {
+      // Reuse user object by passing it to next(). This saves us from
+      // allocating and garbage collecting many objects for files with
+      // many items.
+      user = dataFileReader.next(user);
+      System.out.println(user);
+    }
+
+  }
 }
diff --git a/doc/examples/mr-example/src/main/java/example/MapReduceAvroWordCount.java b/doc/examples/mr-example/src/main/java/example/MapReduceAvroWordCount.java
index 8ca6a2f..5f5c919 100644
--- a/doc/examples/mr-example/src/main/java/example/MapReduceAvroWordCount.java
+++ b/doc/examples/mr-example/src/main/java/example/MapReduceAvroWordCount.java
@@ -112,7 +112,7 @@
     FileOutputFormat.setOutputPath(job, new Path(args[1]));
 
     job.waitForCompletion(true);
-    
+
     return 0;
   }
 
diff --git a/doc/src/content/xdocs/mr.xml b/doc/src/content/xdocs/mr.xml
index a93a137..7c91a24 100644
--- a/doc/src/content/xdocs/mr.xml
+++ b/doc/src/content/xdocs/mr.xml
@@ -39,7 +39,7 @@
       See the <a href="http://hadoop.apache.org/docs/current/">Hadoop
       documentation</a> and the <a href="gettingstartedjava.html">Avro getting
       started guide</a> for introductions to these projects.  This guide uses
-      the old MapReduce API (<code>org.apache.hadoop.mapred</code>) and the new 
+      the old MapReduce API (<code>org.apache.hadoop.mapred</code>) and the new
       MapReduce API (<code>org.apache.hadoop.mapreduce</code>).
     </p>
     <section>
@@ -91,10 +91,10 @@
 &#60;/plugin>
       </source>
       <p>
-        If you do not configure the <em>sourceDirectory</em> and <em>outputDirectory</em> 
-        properties, the defaults will be used. The <em>sourceDirectory</em> property 
-        defaults to <em>src/main/avro</em>. The <em>outputDirectory</em> property 
-        defaults to <em>target/generated-sources</em>. You can change the paths to 
+        If you do not configure the <em>sourceDirectory</em> and <em>outputDirectory</em>
+        properties, the defaults will be used. The <em>sourceDirectory</em> property
+        defaults to <em>src/main/avro</em>. The <em>outputDirectory</em> property
+        defaults to <em>target/generated-sources</em>. You can change the paths to
         match your project layout.
       </p>
       <p>
@@ -297,9 +297,9 @@
         ColorCount reads in data files containing <code>User</code> records,
         defined in <em>examples/user.avsc</em>, and counts the number of
         instances of each favorite color.  (This example draws inspiration from
-        the canonical WordCount MapReduce application.)  This example uses the 
-        old MapReduce API.  See MapReduceAvroWordCount, found under 
-        <em>doc/examples/mr-example/src/main/java/example/</em> to see the new MapReduce 
+        the canonical WordCount MapReduce application.)  This example uses the
+        old MapReduce API.  See MapReduceAvroWordCount, found under
+        <em>doc/examples/mr-example/src/main/java/example/</em> to see the new MapReduce
         API example.  The <code>User</code>
         schema is defined as follows:
       </p>
diff --git a/doc/src/content/xdocs/site.xml b/doc/src/content/xdocs/site.xml
index 85d5fb9..30d0b48 100644
--- a/doc/src/content/xdocs/site.xml
+++ b/doc/src/content/xdocs/site.xml
@@ -24,13 +24,13 @@
 <link href="site:changes"> links to changes.html (or ../changes.html if in
   subdir).
 - Provide aliases for external URLs in the external-refs section.  Eg, <link
-  href="ext:cocoon"> links to http://cocoon.apache.org/ 
+  href="ext:cocoon"> links to http://cocoon.apache.org/
 
 See http://forrest.apache.org/docs/linking.html for more info
 -->
 <!-- The label attribute of the outer "site" element will only show
   in the linkmap (linkmap.html).
-  Use elements project-name and group-name in skinconfig to change name of 
+  Use elements project-name and group-name in skinconfig to change name of
   your site or project that is usually shown at the top of page.
   No matter what you configure for the href attribute, Forrest will
   always use index.html when you request http://yourHost/
@@ -39,7 +39,7 @@
 
 <site label="Avro" href="" xmlns="http://apache.org/forrest/linkmap/1.0">
 
-  <docs label="Documentation"> 
+  <docs label="Documentation">
     <overview   label="Overview"          href="index.html" />
     <gettingstartedjava label="Getting started (Java)" href="gettingstartedjava.html" />
     <gettingstartedpython label="Getting started (Python)" href="gettingstartedpython.html" />
@@ -55,7 +55,7 @@
     <wiki       label="Wiki"              href="ext:wiki" />
     <faq        label="FAQ"               href="ext:faq" />
   </docs>
-  
+
   <external-refs>
     <site      href="http://avro.apache.org/"/>
     <lists     href="http://avro.apache.org/mailing_lists.html"/>
@@ -87,5 +87,5 @@
       <spec href="spec.html"/>
     </trevni>
   </external-refs>
- 
+
 </site>
diff --git a/doc/src/content/xdocs/spec.xml b/doc/src/content/xdocs/spec.xml
index 1e6f4e3..ec1f199 100644
--- a/doc/src/content/xdocs/spec.xml
+++ b/doc/src/content/xdocs/spec.xml
@@ -41,9 +41,9 @@
       <p>A Schema is represented in <a href="ext:json">JSON</a> by one of:</p>
       <ul>
         <li>A JSON string, naming a defined type.</li>
-        
+
         <li>A JSON object, of the form:
-          
+
           <source>{"type": "<em>typeName</em>" ...<em>attributes</em>...}</source>
 
           where <em>typeName</em> is either a primitive or derived
@@ -67,25 +67,25 @@
           <li><code>bytes</code>: sequence of 8-bit unsigned bytes</li>
           <li><code>string</code>: unicode character sequence</li>
         </ul>
-        
+
         <p>Primitive types have no specified attributes.</p>
-        
+
         <p>Primitive type names are also defined type names.  Thus, for
           example, the schema "string" is equivalent to:</p>
-        
+
         <source>{"type": "string"}</source>
 
       </section>
 
       <section id="schema_complex">
         <title>Complex Types</title>
-        
+
         <p>Avro supports six kinds of complex types: records, enums,
         arrays, maps, unions and fixed.</p>
 
         <section id="schema_record">
           <title>Records</title>
-          
+
 	  <p>Records use the type name "record" and support three attributes:</p>
 	  <ul>
 	    <li><code>name</code>: a JSON string providing the name
@@ -145,7 +145,7 @@
 	  <p>For example, a linked-list of 64-bit values may be defined with:</p>
 	  <source>
 {
-  "type": "record", 
+  "type": "record",
   "name": "LongList",
   "aliases": ["LinkedLongs"],                      // old name for this
   "fields" : [
@@ -155,10 +155,10 @@
 }
 	  </source>
 	</section>
-        
+
         <section>
           <title>Enums</title>
-          
+
 	  <p>Enums use the type name "enum" and support the following
 	  attributes:</p>
 	  <ul>
@@ -183,7 +183,7 @@
 }
 	  </source>
 	</section>
-        
+
         <section>
           <title>Arrays</title>
           <p>Arrays use the type name <code>"array"</code> and support
@@ -420,7 +420,7 @@
 	    <p>For example, the record schema</p>
 	    <source>
 	      {
-	      "type": "record", 
+	      "type": "record",
 	      "name": "test",
 	      "fields" : [
 	      {"name": "a", "type": "long"},
@@ -436,7 +436,7 @@
 	      sequence:</p>
 	    <source>36 06 66 6f 6f</source>
 	  </section>
-          
+
           <section id="enum_encoding">
             <title>Enums</title>
             <p>An enum is encoded by a <code>int</code>, representing
@@ -487,13 +487,13 @@
               value, followed by that many key/value pairs.  A block
               with count zero indicates the end of the map.  Each item
               is encoded per the map's value schema.</p>
-	    
+
             <p>If a block's count is negative, its absolute value is used,
               and the count is followed immediately by a <code>long</code>
               block <em>size</em> indicating the number of bytes in the
               block.  This block size permits fast skipping through data,
               e.g., when projecting a record to a subset of its fields.</p>
-	    
+
             <p>The blocked representation permits one to read and write
               maps larger than can be buffered in memory, since one can
               start writing items without knowing the full length of the
@@ -531,7 +531,7 @@
 
       <section id="json_encoding">
         <title>JSON Encoding</title>
-        
+
         <p>Except for unions, the JSON encoding is the same as is used
         to encode <a href="#schema_record">field default
         values</a>.</p>
@@ -547,7 +547,7 @@
           types (record, fixed or enum) the user-specified name is
           used, for other types the type name is used.</li>
         </ul>
-          
+
         <p>For example, the union
           schema <code>["null","string","Foo"]</code>, where Foo is a
           record name, would encode:</p>
@@ -666,7 +666,7 @@
         stored in the file, as JSON data (required).</li>
         <li><strong>avro.codec</strong> the name of the compression codec
         used to compress blocks, as a string.  Implementations
-        are required to support the following codecs: "null" and "deflate".  
+        are required to support the following codecs: "null" and "deflate".
         If codec is absent, it is assumed to be "null".  The codecs
         are described with more detail below.</li>
       </ul>
@@ -692,7 +692,7 @@
         <li>The file's 16-byte sync marker.</li>
       </ul>
           <p>Thus, each block's binary data can be efficiently extracted or skipped without
-          deserializing the contents.  The combination of block size, object counts, and 
+          deserializing the contents.  The combination of block size, object counts, and
           sync markers enable detection of corrupt blocks and help ensure data integrity.</p>
       <section>
       <title>Required Codecs</title>
@@ -704,7 +704,7 @@
         <section>
         <title>deflate</title>
         <p>The "deflate" codec writes the data block using the
-        deflate algorithm as specified in 
+        deflate algorithm as specified in
         <a href="http://www.isi.edu/in-notes/rfc1951.txt">RFC 1951</a>,
         and typically implemented using the zlib library.  Note that this
         format (unlike the "zlib format" in RFC 1950) does not have a
@@ -757,7 +757,7 @@
           <li>a <em>request</em>, a list of named,
             typed <em>parameter</em> schemas (this has the same form
             as the fields of a record declaration);</li>
-          <li>a <em>response</em> schema; </li> 
+          <li>a <em>response</em> schema; </li>
           <li>an optional union of declared <em>error</em> schemas.
 	    The <em>effective</em> union has <code>"string"</code>
 	    prepended to the declared union, to permit transmission of
@@ -1092,14 +1092,14 @@
           <ul>
             <li>the ordering of fields may be different: fields are
               matched by name.</li>
-            
+
             <li>schemas for fields with the same name in both records
               are resolved recursively.</li>
-            
+
             <li>if the writer's record contains a field with a name
               not present in the reader's record, the writer's value
               for that field is ignored.</li>
-            
+
             <li>if the reader's record schema has a field that
               contains a default value, and writer's schema does not
               have a field with the same name, then the reader should
@@ -1137,13 +1137,13 @@
             writer's schema is recursively resolved against it.  If none
             match, an error is signalled.</p>
         </li>
-          
+
         <li><strong>if writer's is a union, but reader's is not</strong>
           <p>If the reader's schema matches the selected writer's schema,
             it is recursively resolved against it.  If they do not
             match, an error is signalled.</p>
         </li>
-          
+
       </ul>
 
       <p>A schema's "doc" fields are ignored for the purposes of schema resolution.  Hence,
diff --git a/doc/src/content/xdocs/tabs.xml b/doc/src/content/xdocs/tabs.xml
index 0a532b5..54ac0a7 100644
--- a/doc/src/content/xdocs/tabs.xml
+++ b/doc/src/content/xdocs/tabs.xml
@@ -34,6 +34,6 @@
 
   <tab label="Project" href="http://avro.apache.org/" />
   <tab label="Wiki" href="http://wiki.apache.org/hadoop/Avro/" />
-  <tab label="Avro &AvroVersion; Documentation" dir="" />  
-  
+  <tab label="Avro &AvroVersion; Documentation" dir="" />
+
 </tabs>
diff --git a/doc/src/skinconf.xml b/doc/src/skinconf.xml
index fcd7933..3b92362 100644
--- a/doc/src/skinconf.xml
+++ b/doc/src/skinconf.xml
@@ -34,7 +34,7 @@
   <search name="Avro" domain="avro.apache.org" provider="google"/>
 
   <!-- Disable the print link? If enabled, invalid HTML 4.0.1 -->
-  <disable-print-link>true</disable-print-link>  
+  <disable-print-link>true</disable-print-link>
   <!-- Disable the PDF link? -->
   <disable-pdf-link>false</disable-pdf-link>
   <!-- Disable the POD link? -->
@@ -52,8 +52,8 @@
   <!-- Disable navigation icons on all external links? -->
   <disable-external-link-image>true</disable-external-link-image>
 
-  <!-- Disable w3c compliance links? 
-    Use e.g. align="center" to move the compliance links logos to 
+  <!-- Disable w3c compliance links?
+    Use e.g. align="center" to move the compliance links logos to
     an alternate location default is left.
     (if the skin supports it) -->
   <disable-compliance-links>true</disable-compliance-links>
@@ -105,7 +105,7 @@
   <!-- Configure the TOC, i.e. the Table of Contents.
   @max-depth
    how many "section" levels need to be included in the
-   generated Table of Contents (TOC). 
+   generated Table of Contents (TOC).
   @min-sections
    Minimum required to create a TOC.
   @location ("page","menu","page,menu", "none")
@@ -115,7 +115,7 @@
 
   <!-- Heading types can be clean|underlined|boxed  -->
   <headings type="clean"/>
-  
+
   <!-- The optional feedback element will be used to construct a
     feedback link in the footer with the page pathname appended:
     <a href="@href">{@to}</a>
@@ -125,15 +125,15 @@
   </feedback>
     -->
   <!--
-    extra-css - here you can define custom css-elements that are 
-    a. overriding the fallback elements or 
-    b. adding the css definition from new elements that you may have 
+    extra-css - here you can define custom css-elements that are
+    a. overriding the fallback elements or
+    b. adding the css definition from new elements that you may have
        used in your documentation.
     -->
   <extra-css>
-    <!--Example of b. 
+    <!--Example of b.
         To define the css definition of a new element that you may have used
-        in the class attribute of a <p> node. 
+        in the class attribute of a <p> node.
         e.g. <p class="quote"/>
     -->
     p.quote {
@@ -162,27 +162,27 @@
 
     <color name="heading" value="#a5b6c6"/>
     <color name="subheading" value="#CFDCED"/>
-        
+
     <color name="navstrip" value="#CFDCED" font="#000000" link="#000000" vlink="#000000" hlink="#000000"/>
     <color name="toolbox" value="#a5b6c6"/>
     <color name="border" value="#a5b6c6"/>
-        
-    <color name="menu" value="#F7F7F7" link="#000000" vlink="#000000" hlink="#000000"/>    
+
+    <color name="menu" value="#F7F7F7" link="#000000" vlink="#000000" hlink="#000000"/>
     <color name="dialog" value="#F7F7F7"/>
-            
+
     <color name="body"    value="#ffffff" link="#0F3660" vlink="#009999" hlink="#000066"/>
-    
-    <color name="table" value="#a5b6c6"/>    
-    <color name="table-cell" value="#ffffff"/>    
+
+    <color name="table" value="#a5b6c6"/>
+    <color name="table-cell" value="#ffffff"/>
     <color name="highlight" value="#ffff00"/>
     <color name="fixme" value="#cc6600"/>
     <color name="note" value="#006699"/>
     <color name="warning" value="#990000"/>
     <color name="code" value="#a5b6c6"/>
-        
+
     <color name="footer" value="#a5b6c6"/>
 -->
-  
+
   <!-- Forrest -->
 <!--
     <color name="header"    value="#294563"/>
@@ -194,28 +194,28 @@
 
     <color name="heading" value="#294563"/>
     <color name="subheading" value="#4a6d8c"/>
-        
+
     <color name="navstrip" value="#cedfef" font="#0F3660" link="#0F3660" vlink="#0F3660" hlink="#000066"/>
     <color name="toolbox" value="#4a6d8c"/>
     <color name="border" value="#294563"/>
-    
-    <color name="menu" value="#4a6d8c" font="#cedfef" link="#ffffff" vlink="#ffffff" hlink="#ffcf00"/>    
+
+    <color name="menu" value="#4a6d8c" font="#cedfef" link="#ffffff" vlink="#ffffff" hlink="#ffcf00"/>
     <color name="dialog" value="#4a6d8c"/>
-            
+
     <color name="body" value="#ffffff"  link="#0F3660" vlink="#009999" hlink="#000066"/>
-    
-    <color name="table" value="#7099C5"/>    
-    <color name="table-cell" value="#f0f0ff"/>    
+
+    <color name="table" value="#7099C5"/>
+    <color name="table-cell" value="#f0f0ff"/>
     <color name="highlight" value="#ffff00"/>
     <color name="fixme" value="#cc6600"/>
     <color name="note" value="#006699"/>
     <color name="warning" value="#990000"/>
     <color name="code" value="#CFDCED"/>
-        
+
     <color name="footer" value="#cedfef"/>
 -->
 
-  <!-- Collabnet --> 
+  <!-- Collabnet -->
 <!--
     <color name="header"    value="#003366"/>
 
@@ -226,24 +226,24 @@
 
     <color name="heading" value="#003366"/>
     <color name="subheading" value="#888888"/>
-    
+
     <color name="navstrip" value="#dddddd" font="#555555"/>
     <color name="toolbox" value="#dddddd" font="#555555"/>
     <color name="border" value="#999999"/>
-    
-    <color name="menu" value="#ffffff"/>    
+
+    <color name="menu" value="#ffffff"/>
     <color name="dialog" value="#eeeeee"/>
-            
+
     <color name="body"      value="#ffffff"/>
-    
-    <color name="table" value="#ccc"/>    
-    <color name="table-cell" value="#ffffff"/>   
+
+    <color name="table" value="#ccc"/>
+    <color name="table-cell" value="#ffffff"/>
     <color name="highlight" value="#ffff00"/>
     <color name="fixme" value="#cc6600"/>
     <color name="note" value="#006699"/>
     <color name="warning" value="#990000"/>
     <color name="code" value="#003366"/>
-        
+
     <color name="footer" value="#ffffff"/>
 -->
  <!-- Lenya using pelt-->
@@ -264,28 +264,28 @@
     <color name="toolbox" value="#CFDCED" font="#000000"/>
 
     <color name="border" value="#999999"/>
-    <color name="menu" value="#4C6C8F" font="#ffffff" link="#ffffff" vlink="#ffffff" hlink="#ffffff" current="#FFCC33" />    
+    <color name="menu" value="#4C6C8F" font="#ffffff" link="#ffffff" vlink="#ffffff" hlink="#ffffff" current="#FFCC33" />
     <color name="menuheading" value="#cfdced" font="#000000" />
     <color name="searchbox" value="#E5E4D9" font="#000000"/>
-    
+
     <color name="dialog" value="#CFDCED"/>
-    <color name="body" value="#ffffff" />            
-    
-    <color name="table" value="#ccc"/>    
-    <color name="table-cell" value="#ffffff"/>   
+    <color name="body" value="#ffffff" />
+
+    <color name="table" value="#ccc"/>
+    <color name="table-cell" value="#ffffff"/>
     <color name="highlight" value="#ffff00"/>
     <color name="fixme" value="#cc6600"/>
     <color name="note" value="#006699"/>
     <color name="warning" value="#990000"/>
     <color name="code" value="#003366"/>
-        
+
     <color name="footer" value="#E5E4D9"/>
 -->
   </colors>
- 
+
   <!-- Settings specific to PDF output. -->
   <pdf>
-    <!-- 
+    <!--
        Supported page sizes are a0, a1, a2, a3, a4, a5, executive,
        folio, legal, ledger, letter, quarto, tabloid (default letter).
        Supported page orientations are portrait, landscape (default
@@ -326,7 +326,7 @@
   <!-- Credits are typically rendered as a set of small clickable
     images in the page footer.
     Use box-location="alt" to move the credit to an alternate location
-    (if the skin supports it). 
+    (if the skin supports it).
   -->
   <credits>
     <credit box-location="alt">
diff --git a/lang/c++/build.sh b/lang/c++/build.sh
index 8ee36f6..e5beaea 100755
--- a/lang/c++/build.sh
+++ b/lang/c++/build.sh
@@ -29,9 +29,9 @@
 
 if [ -f VERSION.txt ]
 then
-VERSION=`cat VERSION.txt`
+  VERSION=`cat VERSION.txt`
 else
-VERSION=`cat ../../share/VERSION.txt`
+  VERSION=`cat ../../share/VERSION.txt`
 fi
 
 BUILD=../../build
@@ -42,69 +42,69 @@
 DIST_DIR=../../dist/$AVRO_CPP
 DOC_CPP=$BUILD/$AVRO_DOC/api/cpp
 DIST_DIR=../../dist/cpp
-TARFILE=../dist/cpp/$AVRO_CPP.tar.gz 
+TARFILE=../dist/cpp/$AVRO_CPP.tar.gz
 
 (mkdir -p build; cd build; cmake -G "Unix Makefiles" ..)
 for target in "$@"
 do
 
 function do_doc() {
-    doxygen
-    if [ -d doc ]
-    then
-        mkdir -p $DOC_CPP
-        cp -R doc/* $DOC_CPP
-    else
-        exit 1
-    fi
+  doxygen
+  if [ -d doc ]
+  then
+    mkdir -p $DOC_CPP
+    cp -R doc/* $DOC_CPP
+  else
+    exit 1
+  fi
 }
 function do_dist() {
-    rm -rf $BUILD_CPP/
-    mkdir -p $BUILD_CPP
-    cp -r api AUTHORS build.sh CMakeLists.txt ChangeLog \
-        LICENSE NOTICE impl jsonschemas NEWS parser README scripts test examples \
-        $BUILD_CPP
-    find $BUILD_CPP -name '.svn' | xargs rm -rf
-    cp ../../share/VERSION.txt $BUILD_CPP
-    mkdir -p $DIST_DIR
-    (cd $BUILD_DIR; tar cvzf $TARFILE $AVRO_CPP && cp $TARFILE $AVRO_CPP )
-    if [ ! -f $DIST_FILE ]
-    then
-        exit 1
-    fi
+  rm -rf $BUILD_CPP/
+  mkdir -p $BUILD_CPP
+  cp -r api AUTHORS build.sh CMakeLists.txt ChangeLog \
+    LICENSE NOTICE impl jsonschemas NEWS parser README scripts test examples \
+    $BUILD_CPP
+  find $BUILD_CPP -name '.svn' | xargs rm -rf
+  cp ../../share/VERSION.txt $BUILD_CPP
+  mkdir -p $DIST_DIR
+  (cd $BUILD_DIR; tar cvzf $TARFILE $AVRO_CPP && cp $TARFILE $AVRO_CPP )
+  if [ ! -f $DIST_FILE ]
+  then
+    exit 1
+  fi
 }
 
 case "$target" in
-    test)
+  test)
     (cd build && make && cd .. \
-        && ./build/buffertest \
-        && ./build/unittest \
-        && ./build/CodecTests \
-        && ./build/StreamTests \
-        && ./build/SpecificTests \
-        && ./build/AvrogencppTests \
-        && ./build/DataFileTests)
-	;;
-
-    dist)
-        do_dist
-        do_doc
+      && ./build/buffertest \
+      && ./build/unittest \
+      && ./build/CodecTests \
+      && ./build/StreamTests \
+      && ./build/SpecificTests \
+      && ./build/AvrogencppTests \
+      && ./build/DataFileTests)
     ;;
 
-    doc)
-        do_doc
+  dist)
+    do_dist
+    do_doc
     ;;
 
-    clean)
+  doc)
+    do_doc
+    ;;
+
+  clean)
     (cd build && make clean)
-	;;
+    ;;
 
-    install)
+  install)
     (cd build && make install)
     ;;
 
-    *)
-        usage
+  *)
+    usage
 esac
 
 done
diff --git a/lang/c/build.sh b/lang/c/build.sh
index 832cc79..e00db69 100755
--- a/lang/c/build.sh
+++ b/lang/c/build.sh
@@ -18,8 +18,8 @@
 # under the License.
 #
 
-set -e						  # exit on error
-#set -x		
+set -e        # exit on error
+#set -x
 
 root_dir=$(pwd)
 build_dir="../../build/c"
@@ -36,59 +36,59 @@
 
 function clean {
   if [ -d $build_dir ]; then
-	find $build_dir | xargs chmod 755
-	rm -rf $build_dir
+  find $build_dir | xargs chmod 755
+  rm -rf $build_dir
   fi
 }
 
 case "$1" in
 
-    interop-data-generate)
-	prepare_build
-	make -C $build_dir
-	$build_dir/tests/generate_interop_data "../../share/test/schemas/interop.avsc"  "../../build/interop/data"
-	;;
+  interop-data-generate)
+    prepare_build
+    make -C $build_dir
+    $build_dir/tests/generate_interop_data "../../share/test/schemas/interop.avsc"  "../../build/interop/data"
+    ;;
 
-    interop-data-test)
-	prepare_build
-	make -C $build_dir
-	$build_dir/tests/test_interop_data "../../build/interop/data"
-	;;
+  interop-data-test)
+    prepare_build
+    make -C $build_dir
+    $build_dir/tests/test_interop_data "../../build/interop/data"
+    ;;
 
-    test)
-	prepare_build
-	make -C $build_dir
-	make -C $build_dir test
-        clean
-	;;
+  test)
+    prepare_build
+    make -C $build_dir
+    make -C $build_dir test
+    clean
+    ;;
 
-    dist)
-	prepare_build
-	cp ../../share/VERSION.txt $root_dir
-	make -C $build_dir docs
-        # This is a hack to force the built documentation to be included
-        # in the source package.
-	cp $build_dir/docs/*.html $root_dir/docs
-	make -C $build_dir package_source
-	rm $root_dir/docs/*.html
-	if [ ! -d $dist_dir ]; then 
-           mkdir -p $dist_dir 
-        fi
-	if [ ! -d $doc_dir ]; then
-           mkdir -p $doc_dir
-	fi
-	mv $build_dir/$tarball $dist_dir
-	cp $build_dir/docs/*.html $doc_dir
-        clean
-	;;
+  dist)
+    prepare_build
+    cp ../../share/VERSION.txt $root_dir
+    make -C $build_dir docs
+    # This is a hack to force the built documentation to be included
+    # in the source package.
+    cp $build_dir/docs/*.html $root_dir/docs
+    make -C $build_dir package_source
+    rm $root_dir/docs/*.html
+    if [ ! -d $dist_dir ]; then
+      mkdir -p $dist_dir
+    fi
+    if [ ! -d $doc_dir ]; then
+      mkdir -p $doc_dir
+    fi
+    mv $build_dir/$tarball $dist_dir
+    cp $build_dir/docs/*.html $doc_dir
+    clean
+    ;;
 
-    clean)
-        clean
-	;;
+  clean)
+    clean
+    ;;
 
-    *)
-        echo "Usage: $0 {interop-data-generate|interop-data-test|test|dist|clean}"
-        exit 1
+  *)
+    echo "Usage: $0 {interop-data-generate|interop-data-test|test|dist|clean}"
+    exit 1
 esac
 
 exit 0
diff --git a/lang/c/version.sh b/lang/c/version.sh
index c0215a2..027c08f 100755
--- a/lang/c/version.sh
+++ b/lang/c/version.sh
@@ -28,7 +28,7 @@
 # Do each of these steps in order and libtool will do the right thing
 # (1) If there are changes to libavro:
 #         libavro_micro_version++
-#         libavro_interface_age++ 
+#         libavro_interface_age++
 #         libavro_binary_age++
 # (2) If any functions have been added:
 #         libavro_interface_age = 0
@@ -42,9 +42,9 @@
 
 # IGNORE EVERYTHING ELSE FROM HERE DOWN.........
 if test $# != 1; then
-	echo "USAGE: $0 CMD"
-  	echo "  where CMD is one of: project, libtool, libcurrent, librevision, libage"
-	exit 1
+  echo "USAGE: $0 CMD"
+    echo "  where CMD is one of: project, libtool, libcurrent, librevision, libage"
+  exit 1
 fi
 
 # http://sources.redhat.com/autobook/autobook/autobook_91.html
@@ -53,29 +53,29 @@
 # The implementation number of the 'current' interface
 librevision=$libavro_interface_age
 # The difference between the newest and oldest interfaces that this library implements
-# In other words, the library implements all the interface numbers in the range from 
+# In other words, the library implements all the interface numbers in the range from
 # number 'current - age' to current
 libage=$(($libavro_binary_age - $libavro_interface_age))
 
 if test "$1" = "project"; then
-	project_ver="undef"
-	version_file="VERSION.txt"
-	if test -f $version_file; then
-		project_ver=$(cat $version_file)
-	else
-		version_file="../../share/VERSION.txt"
-		if test -f $version_file; then
-			project_ver=$(cat $version_file)
-		fi
-	fi
-	printf "%s" $project_ver
+  project_ver="undef"
+  version_file="VERSION.txt"
+  if test -f $version_file; then
+    project_ver=$(cat $version_file)
+  else
+    version_file="../../share/VERSION.txt"
+    if test -f $version_file; then
+      project_ver=$(cat $version_file)
+    fi
+  fi
+  printf "%s" $project_ver
 elif test "$1" = "libtool"; then
-	# useful for the -version-info flag for libtool
-	printf "%d:%d:%d" $libcurrent $librevision $libage
+  # useful for the -version-info flag for libtool
+  printf "%d:%d:%d" $libcurrent $librevision $libage
 elif test "$1" = "libcurrent"; then
-	printf "%d" $libcurrent
+  printf "%d" $libcurrent
 elif test "$1" = "librevision"; then
-	printf "%d" $librevision
+  printf "%d" $librevision
 elif test "$1" = "libage"; then
-	printf "%d" $libage
+  printf "%d" $libage
 fi
diff --git a/lang/csharp/build.sh b/lang/csharp/build.sh
index 1664266..66520c1 100755
--- a/lang/csharp/build.sh
+++ b/lang/csharp/build.sh
@@ -15,10 +15,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-set -e						  # exit on error
-set -x		
+set -e                # exit on error
+set -x
 
-cd `dirname "$0"`				  # connect to root
+cd `dirname "$0"`                  # connect to root
 
 ROOT=../..
 VERSION=`cat $ROOT/share/VERSION.txt`
@@ -28,38 +28,38 @@
 
 case "$1" in
 
-    test)
-	xbuild
-	nunit-console -framework=4.0 Avro.nunit
-	;;
+  test)
+    xbuild
+    nunit-console -framework=4.0 Avro.nunit
+    ;;
 
-    perf)
-	xbuild
-	mono build/perf/Release/Avro.perf.exe
-	;;
+  perf)
+    xbuild
+    mono build/perf/Release/Avro.perf.exe
+    ;;
 
-    dist)
-        # build binary tarball
-	xbuild
-  # add the binary LICENSE and NOTICE to the tarball
-  cp LICENSE NOTICE build/
-	mkdir -p $ROOT/dist/csharp
-        (cd build; tar czf $ROOT/../dist/csharp/avro-csharp-$VERSION.tar.gz main codegen ipc LICENSE NOTICE)
+  dist)
+    # build binary tarball
+    xbuild
+    # add the binary LICENSE and NOTICE to the tarball
+    cp LICENSE NOTICE build/
+    mkdir -p $ROOT/dist/csharp
+    (cd build; tar czf $ROOT/../dist/csharp/avro-csharp-$VERSION.tar.gz main codegen ipc LICENSE NOTICE)
 
-        # build documentation
-        doxygen Avro.dox
-	mkdir -p $ROOT/build/avro-doc-$VERSION/api/csharp
-        cp -pr build/doc/* $ROOT/build/avro-doc-$VERSION/api/csharp
-	;;
+    # build documentation
+    doxygen Avro.dox
+    mkdir -p $ROOT/build/avro-doc-$VERSION/api/csharp
+    cp -pr build/doc/* $ROOT/build/avro-doc-$VERSION/api/csharp
+    ;;
 
-    clean)
-	rm -rf src/apache/{main,test,codegen,ipc}/obj
-        rm -rf build
-	;;
+  clean)
+    rm -rf src/apache/{main,test,codegen,ipc}/obj
+    rm -rf build
+    ;;
 
-    *)
-        echo "Usage: $0 {test|clean|dist|perf}"
-        exit 1
+  *)
+    echo "Usage: $0 {test|clean|dist|perf}"
+    exit 1
 esac
 
 exit 0
diff --git a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/avro/order-service.avpr b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/avro/order-service.avpr
index b9ea5c5..c835141 100644
--- a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/avro/order-service.avpr
+++ b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/avro/order-service.avpr
@@ -2,45 +2,45 @@
   "namespace":"${package}.service",
   "protocol": "OrderProcessingService",
   "doc": "Protocol to submit customer Orders",
-  "types": [    
- 	{
+  "types": [
+    {
       "name": "Item", "type": "record",
       "fields": [
         {"name": "name", "type": "string"},
         {"name": "sku", "type": "long"},
- 		{"name": "quantity", "type": "int"}
- 	  ]
- 	},
- 	{
+        {"name": "quantity", "type": "int"}
+      ]
+    },
+    {
       "name": "Order", "type": "record",
       "fields": [
         {"name": "customerId", "type": "long"},
         {"name": "orderId", "type": "long"},
- 		{"name": "orderItems", "type": {"type": "array", "items": "Item"}}
- 	  ]
- 	},
- 	{
+        {"name": "orderItems", "type": {"type": "array", "items": "Item"}}
+      ]
+    },
+    {
       "name": "Confirmation", "type": "record",
       "fields": [
         {"name": "customerId", "type": {"type": "long"}},
         {"name": "orderId", "type": "long"},
         {"name": "estimatedCompletion", "type": "long"}
- 	  ]
- 	},
- 	{
+      ]
+    },
+    {
       "name": "OrderFailure", "type": "error",
       "fields": [
-      	{"name": "message", "type": "string"}
+        {"name": "message", "type": "string"}
       ]
- 	} 	
+    }
    ],
-    
+
    "messages": {
      "submitOrder": {
-	   "doc": "Submit an Order",
-	   "request": [{"name": "order", "type": "Order"}],
-	   "response": "Confirmation",
-	   "errors": ["OrderFailure"]
-	 }
+       "doc": "Submit an Order",
+       "request": [{"name": "order", "type": "Order"}],
+       "response": "Confirmation",
+       "errors": ["OrderFailure"]
+     }
    }
 }
diff --git a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/service/SimpleOrderService.java b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/service/SimpleOrderService.java
index 161ddc3..0c933a9 100644
--- a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/service/SimpleOrderService.java
+++ b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/service/SimpleOrderService.java
@@ -31,7 +31,7 @@
  */
 public class SimpleOrderService implements OrderProcessingService {
 
-	private Logger log = LoggerFactory.getLogger(SimpleOrderService.class);
+  private Logger log = LoggerFactory.getLogger(SimpleOrderService.class);
 
   @Override
   public Confirmation submitOrder(Order order) throws AvroRemoteException, OrderFailure {
diff --git a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/test/java/integration/SimpleOrderServiceIntegrationTest.java b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/test/java/integration/SimpleOrderServiceIntegrationTest.java
index e751192..7886666 100644
--- a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/test/java/integration/SimpleOrderServiceIntegrationTest.java
+++ b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/test/java/integration/SimpleOrderServiceIntegrationTest.java
@@ -44,44 +44,44 @@
  */
 public class SimpleOrderServiceIntegrationTest {
 
-	private static SimpleOrderServiceEndpoint service;
-	private static SimpleOrderServiceClient client;
+  private static SimpleOrderServiceEndpoint service;
+  private static SimpleOrderServiceClient client;
 
-	@Test
-	public void simpleRoundTripTest() throws Exception {
-		Order simpleOrder = createOrder();
-		Confirmation c = client.submitOrder(simpleOrder);
+  @Test
+  public void simpleRoundTripTest() throws Exception {
+    Order simpleOrder = createOrder();
+    Confirmation c = client.submitOrder(simpleOrder);
 
-		assertEquals(c.getOrderId(), simpleOrder.getOrderId());
-		assertEquals(c.getCustomerId(), simpleOrder.getCustomerId());
-		assertTrue(c.getEstimatedCompletion() > 0);
-	}
+    assertEquals(c.getOrderId(), simpleOrder.getOrderId());
+    assertEquals(c.getCustomerId(), simpleOrder.getCustomerId());
+    assertTrue(c.getEstimatedCompletion() > 0);
+  }
 
-	@BeforeClass
-	public static void setupTransport() throws Exception {
-		InetSocketAddress endpointAddress = new InetSocketAddress("0.0.0.0", 12345);
-		service = new SimpleOrderServiceEndpoint(endpointAddress);
-		client = new SimpleOrderServiceClient(endpointAddress);
+  @BeforeClass
+  public static void setupTransport() throws Exception {
+    InetSocketAddress endpointAddress = new InetSocketAddress("0.0.0.0", 12345);
+    service = new SimpleOrderServiceEndpoint(endpointAddress);
+    client = new SimpleOrderServiceClient(endpointAddress);
 
-		service.start();
-		client.start();
-	}
+    service.start();
+    client.start();
+  }
 
-	@AfterClass
-	public static void shutdownTransport() throws Exception {
-		client.stop();
-		service.stop();
-	}
+  @AfterClass
+  public static void shutdownTransport() throws Exception {
+    client.stop();
+    service.stop();
+  }
 
-	public Order createOrder() {
-		return Order.newBuilder().setOrderId(1).setCustomerId(1).setOrderItems(createItems()).build();
-	}
+  public Order createOrder() {
+    return Order.newBuilder().setOrderId(1).setCustomerId(1).setOrderItems(createItems()).build();
+  }
 
-	public List<Item> createItems() {
-		List<Item> items = new ArrayList<Item>();
-		for (int x = 0; x < 5; x++)
-			items.add(Item.newBuilder().setName("Item-" + x).setQuantity(x + 1).setSku(1230 + x).build());
-		return items;
-	}
+  public List<Item> createItems() {
+    List<Item> items = new ArrayList<Item>();
+    for (int x = 0; x < 5; x++)
+      items.add(Item.newBuilder().setName("Item-" + x).setQuantity(x + 1).setSku(1230 + x).build());
+    return items;
+  }
 
 }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/AvroRemoteException.java b/lang/java/avro/src/main/java/org/apache/avro/AvroRemoteException.java
index 11e2125..8af0f71 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/AvroRemoteException.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/AvroRemoteException.java
@@ -35,12 +35,12 @@
     super(value != null ? value.toString() : null);
     this.value = value;
   }
-  
+
   public AvroRemoteException(Object value, Throwable cause) {
     super(value != null ? value.toString() : null, cause);
     this.value = value;
   }
-  
+
   public Object getValue() { return value; }
 }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/JsonProperties.java b/lang/java/avro/src/main/java/org/apache/avro/JsonProperties.java
index 6273036..4e18c09 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/JsonProperties.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/JsonProperties.java
@@ -31,7 +31,7 @@
 /**
  * Base class for objects that have JSON-valued properties. Avro and JSON values are
  * represented in Java using the following mapping:
- * 
+ *
  * <table>
  *   <th>
  *     <td>Avro type</td>
@@ -154,7 +154,7 @@
    * value <tt>value</tt>. Neither <tt>name</tt> nor <tt>value</tt> can be
    * <tt>null</tt>. It is illegal to add a property if another with
    * the same name but different value already exists in this schema.
-   * 
+   *
    * @param name The name of the property to add
    * @param value The value for the property to add
    */
@@ -167,7 +167,7 @@
    * value <tt>value</tt>. Neither <tt>name</tt> nor <tt>value</tt> can be
    * <tt>null</tt>. It is illegal to add a property if another with
    * the same name but different value already exists in this schema.
-   * 
+   *
    * @param name The name of the property to add
    * @param value The value for the property to add
    * @deprecated use {@link #addProp(String, Object)}
@@ -176,10 +176,10 @@
   public synchronized void addProp(String name, JsonNode value) {
     if (reserved.contains(name))
       throw new AvroRuntimeException("Can't set reserved property: " + name);
-      
+
     if (value == null)
       throw new AvroRuntimeException("Can't set a property to null: " + name);
-    
+
     JsonNode old = props.get(name);
     if (old == null)
       props.put(name, value);
diff --git a/lang/java/avro/src/main/java/org/apache/avro/Protocol.java b/lang/java/avro/src/main/java/org/apache/avro/Protocol.java
index 9c896d2..73e235c 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/Protocol.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/Protocol.java
@@ -110,7 +110,7 @@
     public Schema getErrors() {
       return Schema.createUnion(new ArrayList<Schema>());
     }
-    
+
     /** Returns true if this is a one-way message, with no response or errors.*/
     public boolean isOneWay() { return true; }
 
@@ -161,7 +161,7 @@
   private class TwoWayMessage extends Message {
     private Schema response;
     private Schema errors;
-    
+
     /** Construct a message. */
     private TwoWayMessage(String name, String doc, Map<String,?> propMap,
                           Schema request, Schema response, Schema errors) {
@@ -245,7 +245,7 @@
 
   /** The namespace of this protocol.  Qualifies its name. */
   public String getNamespace() { return namespace; }
-  
+
   /** Doc string for this protocol. */
   public String getDoc() { return doc; }
 
@@ -300,7 +300,7 @@
       && this.messages.equals(that.messages)
       && this.props.equals(that.props);
   }
-  
+
   public int hashCode() {
     return name.hashCode() + namespace.hashCode()
       + types.hashCode() + messages.hashCode() + props.hashCode();
@@ -340,7 +340,7 @@
       if (!resolved.contains(type))
         type.toJson(resolved, gen);
     gen.writeEndArray();
-    
+
     gen.writeObjectFieldStart("messages");
     for (Map.Entry<String,Message> e : messages.entrySet()) {
       gen.writeFieldName(e.getKey());
@@ -379,7 +379,7 @@
       b.append(part);
     return parse(b.toString());
   }
-      
+
   /** Read a protocol from a Json string. */
   public static Protocol parse(String string) {
     try {
@@ -415,7 +415,7 @@
     this.namespace = nameNode.getTextValue();
     types.space(this.namespace);
   }
-  
+
   private void parseDoc(JsonNode json) {
     this.doc = parseDocNode(json);
   }
@@ -505,7 +505,7 @@
       fields.add(newField);
     }
     Schema request = Schema.createRecord(fields);
-    
+
     boolean oneWay = false;
     JsonNode oneWayNode = json.get("one-way");
     if (oneWayNode != null) {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/Schema.java b/lang/java/avro/src/main/java/org/apache/avro/Schema.java
index e2ba927..8125692 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/Schema.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/Schema.java
@@ -63,7 +63,7 @@
  * <li>A <i>boolean</i>; or
  * <li><i>null</i>.
  * </ul>
- * 
+ *
  * A schema can be constructed using one of its static <tt>createXXX</tt>
  * methods, or more conveniently using {@link SchemaBuilder}. The schema objects are
  * <i>logically</i> immutable.
@@ -231,13 +231,13 @@
   /** If this is an enum, return its symbols. */
   public List<String> getEnumSymbols() {
     throw new AvroRuntimeException("Not an enum: "+this);
-  }    
+  }
 
   /** If this is an enum, return a symbol's ordinal value. */
   public int getEnumOrdinal(String symbol) {
     throw new AvroRuntimeException("Not an enum: "+this);
-  }    
-  
+  }
+
   /** If this is an enum, returns true if it contains given symbol. */
   public boolean hasEnumSymbol(String symbol) {
     throw new AvroRuntimeException("Not an enum: "+this);
@@ -460,7 +460,7 @@
         props.equals(that.props);
     }
     public int hashCode() { return name.hashCode() + schema.computeHash(); }
-    
+
     private boolean defaultValueEquals(JsonNode thatDefaultValue) {
       if (defaultValue == null)
         return thatDefaultValue == null;
@@ -870,12 +870,12 @@
         hash += type.computeHash();
       return hash;
     }
-    
+
     @Override
     public void addProp(String name, String value) {
       throw new AvroRuntimeException("Can't set properties on a union: "+this);
     }
-    
+
     void toJson(Names names, JsonGenerator gen) throws IOException {
       gen.writeStartArray();
       for (Schema type : types)
@@ -944,7 +944,7 @@
   private static class BooleanSchema extends Schema {
     public BooleanSchema() { super(Type.BOOLEAN); }
   }
-  
+
   private static class NullSchema extends Schema {
     public NullSchema() { super(Type.NULL); }
   }
@@ -1012,7 +1012,7 @@
         b.append(part);
       return parse(b.toString());
     }
-      
+
     /** Parse a schema from the provided string.
      * If named, the schema is added to the names known to this parser. */
     public Schema parse(String s) {
@@ -1129,14 +1129,14 @@
       return super.put(name, schema);
     }
   }
-  
+
   private static ThreadLocal<Boolean> validateNames
     = new ThreadLocal<Boolean>() {
     @Override protected Boolean initialValue() {
       return true;
     }
   };
-    
+
   private static String validateName(String name) {
     if (!validateNames.get()) return name;        // not validating names
     int length = name.length();
@@ -1159,7 +1159,7 @@
       return false;
     }
   };
-    
+
   private static JsonNode validateDefault(String fieldName, Schema schema,
                                           JsonNode defaultValue) {
     if (VALIDATE_DEFAULTS.get() && (defaultValue != null)
@@ -1175,7 +1175,7 @@
     if (defaultValue == null)
       return false;
     switch (schema.getType()) {
-    case STRING:  
+    case STRING:
     case BYTES:
     case ENUM:
     case FIXED:
@@ -1356,7 +1356,7 @@
         throw new SchemaParseException("alias not a string: "+aliasNode);
       aliases.add(aliasNode.getTextValue());
     }
-    return aliases;  
+    return aliases;
   }
 
   /** Extracts text value associated to key from the container JsonNode,
@@ -1413,7 +1413,7 @@
 
     if (aliases.size() == 0 && fieldAliases.size() == 0)
       return writer;                              // no aliases
-    
+
     seen.clear();
     return applyAliases(writer, seen, aliases, fieldAliases);
   }
@@ -1533,13 +1533,13 @@
    * called on it.
    * @param <E>
    */
-  
+
   /*
    * This class keeps a boolean variable <tt>locked</tt> which is set
    * to <tt>true</tt> in the lock() method. It's legal to call
    * lock() any number of times. Any lock() other than the first one
    * is a no-op.
-   * 
+   *
    * This class throws <tt>IllegalStateException</tt> if a mutating
    * operation is performed after being locked. Since modifications through
    * iterator also use the list's mutating operations, this effectively
@@ -1548,7 +1548,7 @@
   static class LockableArrayList<E> extends ArrayList<E> {
     private static final long serialVersionUID = 1L;
     private boolean locked = false;
-    
+
     public LockableArrayList() {
     }
 
@@ -1580,42 +1580,42 @@
       ensureUnlocked();
       return super.add(e);
     }
-    
+
     public boolean remove(Object o) {
       ensureUnlocked();
       return super.remove(o);
     }
-    
+
     public E remove(int index) {
       ensureUnlocked();
       return super.remove(index);
     }
-      
+
     public boolean addAll(Collection<? extends E> c) {
       ensureUnlocked();
       return super.addAll(c);
     }
-    
+
     public boolean addAll(int index, Collection<? extends E> c) {
       ensureUnlocked();
       return super.addAll(index, c);
     }
-    
+
     public boolean removeAll(Collection<?> c) {
       ensureUnlocked();
       return super.removeAll(c);
     }
-    
+
     public boolean retainAll(Collection<?> c) {
       ensureUnlocked();
       return super.retainAll(c);
     }
-    
+
     public void clear() {
       ensureUnlocked();
       super.clear();
     }
 
   }
-  
+
 }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
index 5573014..f1a1faa 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
@@ -44,7 +44,7 @@
  * </p>
  * For example, the below JSON schema and the fluent builder code to create it
  * are very similar:
- * 
+ *
  * <pre>
  * {
  *   "type": "record",
@@ -58,7 +58,7 @@
  *   ]
  * }
  * </pre>
- * 
+ *
  * <pre>
  *   Schema schema = SchemaBuilder
  *   .record("HandshakeRequest").namespace("org.apache.avro.ipc)
@@ -70,7 +70,7 @@
  *   .endRecord();
  * </pre>
  * <p/>
- * 
+ *
  * <h5>Usage Guide</h5>
  * SchemaBuilder chains together many smaller builders and maintains nested
  * context in order to mimic the Avro Schema specification. Every Avro type in
@@ -87,7 +87,7 @@
  * share a similar API for selecting and building types.
  * <p/>
  * <h5>Primitive Types</h5>
- * All Avro primitive types are trivial to configure. A primitive type in 
+ * All Avro primitive types are trivial to configure. A primitive type in
  * Avro JSON can be declared two ways, one that supports custom properties
  * and one that does not:
  * <pre>
@@ -141,12 +141,12 @@
  * <h6>Nested Types</h6>
  * The Avro nested types, map and array, can have custom properties like
  * all avro types, are not named, and must specify a nested type.
- * After configuration of optional properties, an array or map 
+ * After configuration of optional properties, an array or map
  * builds or selects its nested type with {@link ArrayBuilder#items()}
  * and {@link MapBuilder#values()}, respectively.
- * 
+ *
  * <h6>Fields</h6>
- * {@link RecordBuilder#fields()} returns a {@link FieldAssembler} for 
+ * {@link RecordBuilder#fields()} returns a {@link FieldAssembler} for
  * defining the fields of the record and completing it.
  * Each field must have a name, specified via {@link FieldAssembler#name(String)},
  * which returns a {@link FieldBuilder} for defining aliases, custom properties,
@@ -159,9 +159,9 @@
  * {@link IntDefault#intDefault(int)}
  * <p/>
  * There are field shortcut methods on {@link FieldAssembler} for primitive types.
- * These shortcuts create required, optional, and nullable fields, but do not 
+ * These shortcuts create required, optional, and nullable fields, but do not
  * support field aliases, doc, or custom properties.
- * 
+ *
  * <h6>Unions</h6>
  * Union types are built via {@link TypeBuilder#unionOf()} or
  * {@link FieldTypeBuilder#unionOf()} in the context of type selection.
@@ -196,7 +196,7 @@
  *   .name("f").type().unionOf().nullType().and().longType().endUnion().nullDefault()
  *   .name("f").type().optional().longType()
  * </pre>
- * 
+ *
  * <h6>Explicit Types and Types by Name</h6>
  * Types can also be specified explicitly by passing in a Schema, or by name:
  * <pre>
@@ -218,7 +218,7 @@
 
   private SchemaBuilder() {
   }
-  
+
   /**
    * Create a builder for Avro schemas.
    */
@@ -234,7 +234,7 @@
     return new TypeBuilder<Schema>(new SchemaCompletion(),
         new NameContext().namespace(namespace));
   }
-  
+
   /**
    * Create a builder for an Avro record with the specified name.
    * This is equivalent to:
@@ -292,7 +292,7 @@
   public static MapBuilder<Schema> map() {
     return builder().map();
   }
-  
+
   /**
    * Create a builder for an Avro union
    * This is equivalent to:
@@ -303,7 +303,7 @@
   public static BaseTypeBuilder<UnionAccumulator<Schema>> unionOf() {
     return builder().unionOf();
   }
-  
+
   /**
    * Create a builder for a union of a type and null.
    * This is a shortcut for:
@@ -322,7 +322,7 @@
     return builder().nullable();
   }
 
-  
+
   /**
    * An abstract builder for all Avro types.  All Avro types
    * can have arbitrary string key-value properties.
@@ -331,14 +331,14 @@
     private Map<String, JsonNode> props = null;
     protected PropBuilder() {
     }
-    
+
     /**
      * Set name-value pair properties for this type or field.
      */
     public final S prop(String name, String val) {
       return prop(name, TextNode.valueOf(val));
     }
-    
+
     // for internal use by the Parser
     final S prop(String name, JsonNode val) {
       if(!hasProps()) {
@@ -347,11 +347,11 @@
       props.put(name, val);
       return self();
     }
-    
+
     private boolean hasProps() {
       return (props != null);
     }
-    
+
     final <T extends JsonProperties> T addPropsTo(T jsonable) {
       if (hasProps()) {
         for(Map.Entry<String, JsonNode> prop : props.entrySet()) {
@@ -364,7 +364,7 @@
      * must return 'this' **/
     protected abstract S self();
   }
-  
+
   /**
    * An abstract type that provides builder methods for configuring the name,
    * doc, and aliases of all Avro types that have names (fields, Fixed, Record,
@@ -429,7 +429,7 @@
       return field;
     }
   }
-  
+
   /**
    * An abstract type that provides builder methods for configuring the
    * namespace for all Avro types that have namespaces (Fixed, Record, and
@@ -475,7 +475,7 @@
       return context;
     }
   }
-  
+
   /**
    * An abstraction for sharing code amongst all primitive type builders.
    */
@@ -779,7 +779,7 @@
     }
 
   }
-  
+
   /**
    * Builds an Avro Map type with optional properties.
    * <p/>
@@ -873,12 +873,12 @@
 
   /**
    * internal class for passing the naming context around. This allows for the
-   * following: 
+   * following:
    * <li>Cache and re-use primitive schemas when they do not set
    * properties.</li>
    * <li>Provide a default namespace for nested contexts (as
    * the JSON Schema spec does).</li>
-   * <li>Allow previously defined named types or primitive types 
+   * <li>Allow previously defined named types or primitive types
    * to be referenced by name.</li>
    **/
   private static class NameContext {
@@ -895,7 +895,7 @@
     }
     private final HashMap<String, Schema> schemas;
     private final String namespace;
-    
+
     private NameContext() {
       this.schemas = new HashMap<String, Schema>();
       this.namespace = null;
@@ -908,20 +908,20 @@
       schemas.put("bytes", Schema.create(Schema.Type.BYTES));
       schemas.put("string", Schema.create(Schema.Type.STRING));
     }
-    
+
     private NameContext(HashMap<String, Schema> schemas, String namespace) {
       this.schemas = schemas;
       this.namespace = "".equals(namespace) ? null : namespace;
     }
-    
+
     private NameContext namespace(String namespace) {
       return new NameContext(schemas, namespace);
     }
-    
+
     private Schema get(String name, String namespace) {
       return getFullname(resolveName(name, namespace));
     }
-    
+
     private Schema getFullname(String fullName) {
       Schema schema = schemas.get(fullName);
       if(schema == null) {
@@ -929,7 +929,7 @@
       }
       return schema;
     }
-    
+
     private void put(Schema schema) {
       String fullName = schema.getFullName();
       if(schemas.containsKey(fullName)){
@@ -937,7 +937,7 @@
      }
      schemas.put(fullName, schema);
     }
-    
+
     private String resolveName(String name, String space) {
       if (PRIMITIVES.contains(name) && space == null) {
         return name;
@@ -950,11 +950,11 @@
         if (space != null && !"".equals(space)) {
           return space + "." + name;
         }
-      } 
+      }
       return name;
     }
   }
- 
+
   /**
    * A common API for building types within a context. BaseTypeBuilder can build
    * all types other than Unions. {@link TypeBuilder} can additionally build
@@ -971,17 +971,17 @@
   public static class BaseTypeBuilder<R> {
     private final Completion<R> context;
     private final NameContext names;
-    
+
     private BaseTypeBuilder(Completion<R> context, NameContext names) {
       this.context = context;
       this.names = names;
     }
-    
+
     /** Use the schema provided as the type. **/
     public final R type(Schema schema) {
       return context.complete(schema);
     }
-    
+
     /**
      * Look up the type by name. This type must be previously defined in the
      * context of this builder.
@@ -993,7 +993,7 @@
     public final R type(String name) {
       return type(name, null);
     }
-    
+
     /**
      * Look up the type by name and namespace. This type must be previously
      * defined in the context of this builder.
@@ -1042,7 +1042,7 @@
     public final IntBuilder<R> intBuilder() {
       return IntBuilder.create(context, names);
     }
-    
+
     /**
      * A plain long type without custom properties. This is equivalent to:
      * <pre>
@@ -1159,7 +1159,7 @@
      * <pre>
      * {"type":"map", "values":"int"}
      * </pre>
-     **/ 
+     **/
     public final MapBuilder<R> map() {
       return MapBuilder.create(context, names);
     }
@@ -1172,7 +1172,7 @@
      * <pre>
      * {"type":"array", "values":"long"}
      * </pre>
-     **/ 
+     **/
     public final ArrayBuilder<R> array() {
       return ArrayBuilder.create(context, names);
     }
@@ -1185,12 +1185,12 @@
      * <pre>
      * {"type":"fixed", "name":"com.foo.IPv4", "size":4}
      * </pre>
-     **/ 
+     **/
     public final FixedBuilder<R> fixed(String name) {
       return FixedBuilder.create(context, names, name);
     }
-    
-    /** Build an Avro enum type. Example usage: 
+
+    /** Build an Avro enum type. Example usage:
      * <pre>
      * enumeration("Suits").namespace("org.cards").doc("card suit names")
      *   .symbols("HEART", "SPADE", "DIAMOND", "CLUB")
@@ -1201,7 +1201,7 @@
      *  "doc":"card suit names", "symbols":[
      *    "HEART", "SPADE", "DIAMOND", "CLUB"]}
      * </pre>
-     **/ 
+     **/
     public final EnumBuilder<R> enumeration(String name) {
       return EnumBuilder.create(context, names, name);
     }
@@ -1224,18 +1224,18 @@
      *     ]}
      *   ]}
      * </pre>
-     **/ 
+     **/
     public final RecordBuilder<R> record(String name) {
-      return RecordBuilder.create(context, names, name); 
+      return RecordBuilder.create(context, names, name);
     }
-    
+
     /** Build an Avro union schema type. Example usage:
      * <pre>unionOf().stringType().and().bytesType().endUnion()</pre>
-     **/ 
+     **/
     protected BaseTypeBuilder<UnionAccumulator<R>> unionOf() {
       return UnionBuilder.create(context, names);
     }
-    
+
     /** A shortcut for building a union of a type and null.
      * <p/>
      * For example, the code snippets below are equivalent:
@@ -1245,10 +1245,10 @@
     protected BaseTypeBuilder<R> nullable() {
       return new BaseTypeBuilder<R>(new NullableCompletion<R>(context), names);
     }
-    
+
   }
 
-  /** A Builder for creating any Avro schema type. 
+  /** A Builder for creating any Avro schema type.
    **/
   public static final class TypeBuilder<R> extends BaseTypeBuilder<R> {
     private TypeBuilder(Completion<R> context, NameContext names) {
@@ -1259,7 +1259,7 @@
     public BaseTypeBuilder<UnionAccumulator<R>> unionOf() {
       return super.unionOf();
     }
-    
+
     @Override
     public BaseTypeBuilder<R> nullable() {
       return super.nullable();
@@ -1310,7 +1310,7 @@
       this.names = bldr.names();
       this.wrapper = wrapper;
     }
-    
+
     /**
      * A plain boolean type without custom properties. This is equivalent to:
      * <pre>
@@ -1346,7 +1346,7 @@
     public final IntBuilder<IntDefault<R>> intBuilder() {
       return IntBuilder.create(wrap(new IntDefault<R>(bldr)), names);
     }
-    
+
     /**
      * A plain long type without custom properties. This is equivalent to:
      * <pre>
@@ -1455,31 +1455,31 @@
       return NullBuilder.create(wrap(new NullDefault<R>(bldr)), names);
     }
 
-    /** Build an Avro map type **/ 
+    /** Build an Avro map type **/
     public final MapBuilder<MapDefault<R>> map() {
       return MapBuilder.create(wrap(new MapDefault<R>(bldr)), names);
     }
 
-    /** Build an Avro array type **/ 
+    /** Build an Avro array type **/
     public final ArrayBuilder<ArrayDefault<R>> array() {
       return ArrayBuilder.create(wrap(new ArrayDefault<R>(bldr)), names);
     }
 
-    /** Build an Avro fixed type. **/ 
+    /** Build an Avro fixed type. **/
     public final FixedBuilder<FixedDefault<R>> fixed(String name) {
       return FixedBuilder.create(wrap(new FixedDefault<R>(bldr)), names, name);
     }
-    
-    /** Build an Avro enum type. **/ 
+
+    /** Build an Avro enum type. **/
     public final EnumBuilder<EnumDefault<R>> enumeration(String name) {
       return EnumBuilder.create(wrap(new EnumDefault<R>(bldr)), names, name);
     }
 
-    /** Build an Avro record type. **/ 
+    /** Build an Avro record type. **/
     public final RecordBuilder<RecordDefault<R>> record(String name) {
-      return RecordBuilder.create(wrap(new RecordDefault<R>(bldr)), names, name); 
+      return RecordBuilder.create(wrap(new RecordDefault<R>(bldr)), names, name);
     }
-    
+
     private <C> Completion<C> wrap(
        Completion<C> completion) {
       if (wrapper != null) {
@@ -1488,7 +1488,7 @@
       return completion;
     }
   }
-  
+
   /** FieldTypeBuilder adds {@link #unionOf()}, {@link #nullable()}, and {@link #optional()}
    * to BaseFieldTypeBuilder. **/
   public static final class FieldTypeBuilder<R> extends BaseFieldTypeBuilder<R> {
@@ -1496,7 +1496,7 @@
       super(bldr, null);
     }
 
-    /** Build an Avro union schema type. **/ 
+    /** Build an Avro union schema type. **/
     public UnionFieldTypeBuilder<R> unionOf() {
       return new UnionFieldTypeBuilder<R>(bldr);
     }
@@ -1537,7 +1537,7 @@
       this.bldr = bldr;
       this.names = bldr.names();
     }
-    
+
     /**
      * A plain boolean type without custom properties. This is equivalent to:
      * <pre>
@@ -1573,7 +1573,7 @@
     public IntBuilder<UnionAccumulator<IntDefault<R>>> intBuilder() {
       return IntBuilder.create(completion(new IntDefault<R>(bldr)), names);
     }
-    
+
     /**
      * A plain long type without custom properties. This is equivalent to:
      * <pre>
@@ -1682,31 +1682,31 @@
       return NullBuilder.create(completion(new NullDefault<R>(bldr)), names);
     }
 
-    /** Build an Avro map type **/ 
+    /** Build an Avro map type **/
     public MapBuilder<UnionAccumulator<MapDefault<R>>> map() {
       return MapBuilder.create(completion(new MapDefault<R>(bldr)), names);
     }
 
-    /** Build an Avro array type **/ 
+    /** Build an Avro array type **/
     public ArrayBuilder<UnionAccumulator<ArrayDefault<R>>> array() {
       return ArrayBuilder.create(completion(new ArrayDefault<R>(bldr)), names);
     }
 
-    /** Build an Avro fixed type. **/ 
+    /** Build an Avro fixed type. **/
     public FixedBuilder<UnionAccumulator<FixedDefault<R>>> fixed(String name) {
       return FixedBuilder.create(completion(new FixedDefault<R>(bldr)), names, name);
     }
-    
-    /** Build an Avro enum type. **/ 
+
+    /** Build an Avro enum type. **/
     public EnumBuilder<UnionAccumulator<EnumDefault<R>>> enumeration(String name) {
       return EnumBuilder.create(completion(new EnumDefault<R>(bldr)), names, name);
     }
 
-    /** Build an Avro record type. **/ 
+    /** Build an Avro record type. **/
     public RecordBuilder<UnionAccumulator<RecordDefault<R>>> record(String name) {
-      return RecordBuilder.create(completion(new RecordDefault<R>(bldr)), names, name); 
+      return RecordBuilder.create(completion(new RecordDefault<R>(bldr)), names, name);
     }
-    
+
     private <C> UnionCompletion<C> completion(Completion<C> context) {
       return new UnionCompletion<C>(context, names, new ArrayList<Schema>());
     }
@@ -1756,7 +1756,7 @@
     public FieldBuilder<R> name(String fieldName) {
       return new FieldBuilder<R>(this, names, fieldName);
     }
-    
+
     /**
      * Shortcut for creating a boolean field with the given name and no default.
      * <p/>This is equivalent to:
@@ -1767,9 +1767,9 @@
     public FieldAssembler<R> requiredBoolean(String fieldName) {
       return name(fieldName).type().booleanType().noDefault();
     }
-    
+
     /**
-     * Shortcut for creating an optional boolean field: a union of null and 
+     * Shortcut for creating an optional boolean field: a union of null and
      * boolean with null default.<p/>
      * This is equivalent to:
      * <pre>
@@ -1779,13 +1779,13 @@
     public FieldAssembler<R> optionalBoolean(String fieldName) {
       return name(fieldName).type().optional().booleanType();
     }
-    
+
     /**
      * Shortcut for creating a nullable boolean field: a union of boolean and
      * null with an boolean default.
      * <p/>
      * This is equivalent to:
-     * 
+     *
      * <pre>
      * name(fieldName).type().nullable().booleanType().booleanDefault(defaultVal)
      * </pre>
@@ -1805,7 +1805,7 @@
     public FieldAssembler<R> requiredInt(String fieldName) {
       return name(fieldName).type().intType().noDefault();
     }
-    
+
     /**
      * Shortcut for creating an optional int field: a union of null and int
      * with null default.<p/>
@@ -1817,7 +1817,7 @@
     public FieldAssembler<R> optionalInt(String fieldName) {
       return name(fieldName).type().optional().intType();
     }
-    
+
     /**
      * Shortcut for creating a nullable int field: a union of int and null
      * with an int default.<p/>
@@ -1840,7 +1840,7 @@
     public FieldAssembler<R> requiredLong(String fieldName) {
       return name(fieldName).type().longType().noDefault();
     }
-    
+
     /**
      * Shortcut for creating an optional long field: a union of null and long
      * with null default.<p/>
@@ -1852,7 +1852,7 @@
     public FieldAssembler<R> optionalLong(String fieldName) {
       return name(fieldName).type().optional().longType();
     }
-    
+
     /**
      * Shortcut for creating a nullable long field: a union of long and null
      * with a long default.<p/>
@@ -1864,7 +1864,7 @@
     public FieldAssembler<R> nullableLong(String fieldName, long defaultVal) {
       return name(fieldName).type().nullable().longType().longDefault(defaultVal);
     }
-    
+
     /**
      * Shortcut for creating a float field with the given name and no default.
      * <p/>This is equivalent to:
@@ -1875,7 +1875,7 @@
     public FieldAssembler<R> requiredFloat(String fieldName) {
       return name(fieldName).type().floatType().noDefault();
     }
-    
+
     /**
      * Shortcut for creating an optional float field: a union of null and float
      * with null default.<p/>
@@ -1887,7 +1887,7 @@
     public FieldAssembler<R> optionalFloat(String fieldName) {
       return name(fieldName).type().optional().floatType();
     }
-    
+
     /**
      * Shortcut for creating a nullable float field: a union of float and null
      * with a float default.<p/>
@@ -1910,7 +1910,7 @@
     public FieldAssembler<R> requiredDouble(String fieldName) {
       return name(fieldName).type().doubleType().noDefault();
     }
-    
+
     /**
      * Shortcut for creating an optional double field: a union of null and double
      * with null default.<p/>
@@ -1922,7 +1922,7 @@
     public FieldAssembler<R> optionalDouble(String fieldName) {
       return name(fieldName).type().optional().doubleType();
     }
-    
+
     /**
      * Shortcut for creating a nullable double field: a union of double and null
      * with a double default.<p/>
@@ -1934,7 +1934,7 @@
     public FieldAssembler<R> nullableDouble(String fieldName, double defaultVal) {
       return name(fieldName).type().nullable().doubleType().doubleDefault(defaultVal);
     }
-    
+
     /**
      * Shortcut for creating a string field with the given name and no default.
      * <p/>This is equivalent to:
@@ -1945,7 +1945,7 @@
     public FieldAssembler<R> requiredString(String fieldName) {
       return name(fieldName).type().stringType().noDefault();
     }
-    
+
     /**
      * Shortcut for creating an optional string field: a union of null and string
      * with null default.<p/>
@@ -1957,7 +1957,7 @@
     public FieldAssembler<R> optionalString(String fieldName) {
       return name(fieldName).type().optional().stringType();
     }
-    
+
     /**
      * Shortcut for creating a nullable string field: a union of string and null
      * with a string default.<p/>
@@ -1980,7 +1980,7 @@
     public FieldAssembler<R> requiredBytes(String fieldName) {
       return name(fieldName).type().bytesType().noDefault();
     }
-    
+
     /**
      * Shortcut for creating an optional bytes field: a union of null and bytes
      * with null default.<p/>
@@ -1992,7 +1992,7 @@
     public FieldAssembler<R> optionalBytes(String fieldName) {
       return name(fieldName).type().optional().bytesType();
     }
-    
+
     /**
      * Shortcut for creating a nullable bytes field: a union of bytes and null
      * with a bytes default.<p/>
@@ -2018,12 +2018,12 @@
       fields.add(field);
       return this;
     }
-    
+
   }
-  
+
   /**
    * Builds a Field in the context of a {@link FieldAssembler}.
-   * 
+   *
    * Usage is to first configure any of the optional parameters and then to call one
    * of the type methods to complete the field.  For example
    * <pre>
@@ -2040,13 +2040,13 @@
       super(names, name);
       this.fields = fields;
     }
-    
+
     /** Set this field to have ascending order.  Ascending is the default **/
     public FieldBuilder<R> orderAscending() {
       order = Schema.Field.Order.ASCENDING;
       return self();
     }
-    
+
     /** Set this field to have decending order.  Decending is the default **/
     public FieldBuilder<R> orderDescending() {
       order = Schema.Field.Order.DESCENDING;
@@ -2058,7 +2058,7 @@
       order = Schema.Field.Order.IGNORE;
       return self();
     }
-    
+
     /**
      * Final step in configuring this field, finalizing name, namespace, alias,
      * and order.
@@ -2070,7 +2070,7 @@
 
     /**
      * Final step in configuring this field, finalizing name, namespace, alias,
-     * and order.  Sets the field's type to the provided schema, returns a 
+     * and order.  Sets the field's type to the provided schema, returns a
      * {@link GenericDefault}.
      */
     public GenericDefault<R> type(Schema type) {
@@ -2110,16 +2110,16 @@
       Schema schema = names().get(name, namespace);
       return type(schema);
     }
-    
+
     private FieldAssembler<R> completeField(Schema schema, Object defaultVal) {
       JsonNode defaultNode = toJsonNode(defaultVal);
       return completeField(schema, defaultNode);
     }
-    
+
     private FieldAssembler<R> completeField(Schema schema) {
       return completeField(schema, null);
     }
-    
+
     private FieldAssembler<R> completeField(Schema schema, JsonNode defaultVal) {
       Field field = new Field(name(), schema, doc(), defaultVal, order);
       addPropsTo(field);
@@ -2132,7 +2132,7 @@
       return this;
     }
   }
-    
+
   /** Abstract base class for field defaults. **/
   public static abstract class FieldDefault<R, S extends FieldDefault<R, S>> extends Completion<S> {
     private final FieldBuilder<R> field;
@@ -2140,25 +2140,25 @@
     FieldDefault(FieldBuilder<R> field) {
       this.field = field;
     }
-    
+
     /** Completes this field with no default value **/
     public final FieldAssembler<R> noDefault() {
       return field.completeField(schema);
     }
-    
+
     private FieldAssembler<R> usingDefault(Object defaultVal) {
       return field.completeField(schema, defaultVal);
     }
-    
+
     @Override
     final S complete(Schema schema) {
       this.schema = schema;
       return self();
     }
-    
+
     abstract S self();
   }
-  
+
   /** Choose whether to use a default value for the field or not. **/
   public static class BooleanDefault<R> extends FieldDefault<R, BooleanDefault<R>> {
     private BooleanDefault(FieldBuilder<R> field) {
@@ -2169,13 +2169,13 @@
     public final FieldAssembler<R> booleanDefault(boolean defaultVal) {
       return super.usingDefault(defaultVal);
     }
-    
+
     @Override
     final BooleanDefault<R> self() {
       return this;
     }
   }
-  
+
   /** Choose whether to use a default value for the field or not. **/
   public static class IntDefault<R> extends FieldDefault<R, IntDefault<R>> {
     private IntDefault(FieldBuilder<R> field) {
@@ -2186,13 +2186,13 @@
     public final FieldAssembler<R> intDefault(int defaultVal) {
       return super.usingDefault(defaultVal);
     }
-    
+
     @Override
     final IntDefault<R> self() {
       return this;
     }
   }
-  
+
   /** Choose whether to use a default value for the field or not. **/
   public static class LongDefault<R> extends FieldDefault<R, LongDefault<R>> {
     private LongDefault(FieldBuilder<R> field) {
@@ -2203,7 +2203,7 @@
     public final FieldAssembler<R> longDefault(long defaultVal) {
       return super.usingDefault(defaultVal);
     }
-    
+
     @Override
     final LongDefault<R> self() {
       return this;
@@ -2220,7 +2220,7 @@
     public final FieldAssembler<R> floatDefault(float defaultVal) {
       return super.usingDefault(defaultVal);
     }
-    
+
     @Override
     final FloatDefault<R> self() {
       return this;
@@ -2237,7 +2237,7 @@
     public final FieldAssembler<R> doubleDefault(double defaultVal) {
       return super.usingDefault(defaultVal);
     }
-    
+
     @Override
     final DoubleDefault<R> self() {
       return this;
@@ -2254,7 +2254,7 @@
     public final FieldAssembler<R> stringDefault(String defaultVal) {
       return super.usingDefault(defaultVal);
     }
-    
+
     @Override
     final StringDefault<R> self() {
       return this;
@@ -2266,24 +2266,24 @@
     private BytesDefault(FieldBuilder<R> field) {
       super(field);
     }
-    
+
     /** Completes this field with the default value provided, cannot be null **/
     public final FieldAssembler<R> bytesDefault(byte[] defaultVal) {
       return super.usingDefault(ByteBuffer.wrap(defaultVal));
     }
-    
+
     /** Completes this field with the default value provided, cannot be null **/
     public final FieldAssembler<R> bytesDefault(ByteBuffer defaultVal) {
       return super.usingDefault(defaultVal);
     }
-    
+
     /** Completes this field with the default value provided, cannot be null.
      * The string is interpreted as a byte[], with each character code point
      * value equalling the byte value, as in the Avro spec JSON default. **/
     public final FieldAssembler<R> bytesDefault(String defaultVal) {
-      return super.usingDefault(defaultVal);  
+      return super.usingDefault(defaultVal);
     }
-    
+
     @Override
     final BytesDefault<R> self() {
       return this;
@@ -2300,110 +2300,110 @@
     public final FieldAssembler<R> nullDefault() {
       return super.usingDefault(null);
     }
-    
+
     @Override
     final NullDefault<R> self() {
       return this;
     }
   }
-  
+
   /** Choose whether to use a default value for the field or not. **/
   public static class MapDefault<R> extends FieldDefault<R, MapDefault<R>> {
     private MapDefault(FieldBuilder<R> field) {
       super(field);
     }
-    
+
     /** Completes this field with the default value provided, cannot be null **/
     public final <K, V> FieldAssembler<R> mapDefault(Map<K, V> defaultVal) {
       return super.usingDefault(defaultVal);
     }
-    
+
     @Override
     final MapDefault<R> self() {
       return this;
     }
   }
-  
+
   /** Choose whether to use a default value for the field or not. **/
   public static class ArrayDefault<R> extends FieldDefault<R, ArrayDefault<R>> {
     private ArrayDefault(FieldBuilder<R> field) {
       super(field);
     }
-    
+
     /** Completes this field with the default value provided, cannot be null **/
     public final <V> FieldAssembler<R> arrayDefault(List<V> defaultVal) {
       return super.usingDefault(defaultVal);
     }
-    
+
     @Override
     final ArrayDefault<R> self() {
       return this;
     }
   }
-  
+
   /** Choose whether to use a default value for the field or not. **/
   public static class FixedDefault<R> extends FieldDefault<R, FixedDefault<R>> {
     private FixedDefault(FieldBuilder<R> field) {
       super(field);
     }
-    
+
     /** Completes this field with the default value provided, cannot be null **/
     public final FieldAssembler<R> fixedDefault(byte[] defaultVal) {
       return super.usingDefault(ByteBuffer.wrap(defaultVal));
     }
-    
+
     /** Completes this field with the default value provided, cannot be null **/
     public final FieldAssembler<R> fixedDefault(ByteBuffer defaultVal) {
       return super.usingDefault(defaultVal);
     }
-    
+
     /** Completes this field with the default value provided, cannot be null.
      * The string is interpreted as a byte[], with each character code point
      * value equalling the byte value, as in the Avro spec JSON default. **/
     public final FieldAssembler<R> fixedDefault(String defaultVal) {
-      return super.usingDefault(defaultVal);  
+      return super.usingDefault(defaultVal);
     }
-    
+
     @Override
     final FixedDefault<R> self() {
       return this;
     }
   }
-  
+
   /** Choose whether to use a default value for the field or not. **/
   public static class EnumDefault<R> extends FieldDefault<R, EnumDefault<R>> {
     private EnumDefault(FieldBuilder<R> field) {
       super(field);
     }
-    
+
     /** Completes this field with the default value provided, cannot be null **/
     public final FieldAssembler<R> enumDefault(String defaultVal) {
       return super.usingDefault(defaultVal);
     }
-    
+
     @Override
     final EnumDefault<R> self() {
       return this;
     }
   }
-  
+
   /** Choose whether to use a default value for the field or not. **/
   public static class RecordDefault<R> extends FieldDefault<R, RecordDefault<R>> {
     private RecordDefault(FieldBuilder<R> field) {
       super(field);
     }
-    
+
     /** Completes this field with the default value provided, cannot be null **/
     public final FieldAssembler<R> recordDefault(GenericRecord defaultVal) {
       return super.usingDefault(defaultVal);
     }
-    
+
     @Override
     final RecordDefault<R> self() {
       return this;
     }
   }
-  
+
   public final static class GenericDefault<R> {
     private final FieldBuilder<R> field;
     private final Schema schema;
@@ -2411,37 +2411,37 @@
       this.field = field;
       this.schema = schema;
     }
-    
+
     /** Do not use a default value for this field. **/
     public FieldAssembler<R> noDefault() {
       return field.completeField(schema);
     }
-    
+
     /** Completes this field with the default value provided.
      * The value must conform to the schema of the field. **/
     public FieldAssembler<R> withDefault(Object defaultVal) {
       return field.completeField(schema, defaultVal);
     }
   }
-  
-  /** 
+
+  /**
    * Completion<R> is for internal builder use, all subclasses are private.
-   * 
+   *
    * Completion is an object that takes a Schema and returns some result.
    */
   private abstract static class Completion<R> {
     abstract R complete(Schema schema);
   }
-  
+
   private static class SchemaCompletion extends Completion<Schema> {
     @Override
     protected Schema complete(Schema schema) {
       return schema;
     }
   }
-  
+
   private static final Schema NULL_SCHEMA = Schema.create(Schema.Type.NULL);
-  
+
   private static class NullableCompletion<R> extends Completion<R> {
     private final Completion<R> context;
     private NullableCompletion(Completion<R> context) {
@@ -2454,7 +2454,7 @@
       return context.complete(nullable);
     }
   }
-  
+
   private static class OptionalCompletion<R> extends Completion<FieldAssembler<R>> {
     private final FieldBuilder<R> bldr;
     public OptionalCompletion(FieldBuilder<R> bldr) {
@@ -2467,11 +2467,11 @@
       return bldr.completeField(optional, (Object)null);
     }
   }
-  
+
   private abstract static class CompletionWrapper {
     abstract <R> Completion<R> wrap(Completion<R> completion);
   }
-  
+
   private static final class NullableCompletionWrapper extends CompletionWrapper {
     @Override
     <R> Completion<R> wrap(Completion<R> completion) {
@@ -2539,7 +2539,7 @@
       return new UnionAccumulator<R>(context, names, updated);
     }
   }
-  
+
   /** Accumulates all of the types in a union.  Add an additional type with
    * {@link #and()}.  Complete the union with {@link #endUnion()}
    */
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaValidationStrategy.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidationStrategy.java
index dc1c9cc..8d73375 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/SchemaValidationStrategy.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidationStrategy.java
@@ -29,7 +29,7 @@
 
   /**
    * Validates that one schema is compatible with another.
-   * 
+   *
    * @throws SchemaValidationException if the schemas are not compatible.
    */
   void validate(Schema toValidate, Schema existing)
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaValidator.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidator.java
index 197c5c0..af85bac 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/SchemaValidator.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidator.java
@@ -36,7 +36,7 @@
    * chronological order. This allows some validators to identify which schemas
    * are the most "recent" in order to validate only against the mosst recent
    * schema(s).
-   * 
+   *
    * @param toValidate The schema to validate
    * @param existing The schemas to validate against, in order from most recent to latest if applicable
    * @throws SchemaValidationException if the schema fails to validate.
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaValidatorBuilder.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidatorBuilder.java
index e1563d2..5875435 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/SchemaValidatorBuilder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidatorBuilder.java
@@ -57,17 +57,17 @@
     this.strategy = new ValidateMutualRead();
     return this;
   }
-  
+
   public SchemaValidator validateLatest() {
     valid();
     return new ValidateLatest(strategy);
   }
-  
+
   public SchemaValidator validateAll() {
     valid();
     return new ValidateAll(strategy);
   }
-  
+
   private void valid() {
     if(null == strategy) {
       throw new AvroRuntimeException("SchemaValidationStrategy not specified in builder");
diff --git a/lang/java/avro/src/main/java/org/apache/avro/UnresolvedUnionException.java b/lang/java/avro/src/main/java/org/apache/avro/UnresolvedUnionException.java
index ed66aca..e875baf 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/UnresolvedUnionException.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/UnresolvedUnionException.java
@@ -22,7 +22,7 @@
 public class UnresolvedUnionException extends AvroRuntimeException {
   private Object unresolvedDatum;
   private Schema unionSchema;
-  
+
   public UnresolvedUnionException(Schema unionSchema, Object unresolvedDatum) {
     super("Not in union "+unionSchema+": "+unresolvedDatum);
     this.unionSchema = unionSchema;
diff --git a/lang/java/avro/src/main/java/org/apache/avro/ValidateCanBeRead.java b/lang/java/avro/src/main/java/org/apache/avro/ValidateCanBeRead.java
index 60d4b04..fed5a10 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/ValidateCanBeRead.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/ValidateCanBeRead.java
@@ -22,14 +22,14 @@
  * A {@link SchemaValidationStrategy} that checks that the data written with the
  * {@link Schema} to validate can be read by the existing schema according to
  * the default Avro schema resolution rules.
- * 
+ *
  */
 class ValidateCanBeRead implements SchemaValidationStrategy {
 
   /**
    * Validate that data written with first schema provided can be read using the
    * second schema, according to the default Avro schema resolution rules.
-   * 
+   *
    * @throws SchemaValidationException
    *           if the second schema cannot read data written by the first.
    */
diff --git a/lang/java/avro/src/main/java/org/apache/avro/ValidateCanRead.java b/lang/java/avro/src/main/java/org/apache/avro/ValidateCanRead.java
index bbf0c1e..7384eca 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/ValidateCanRead.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/ValidateCanRead.java
@@ -22,7 +22,7 @@
  * A {@link SchemaValidationStrategy} that checks that the {@link Schema} to
  * validate can read the existing schema according to the default Avro schema
  * resolution rules.
- * 
+ *
  */
 class ValidateCanRead implements SchemaValidationStrategy {
 
@@ -30,7 +30,7 @@
    * Validate that the first schema provided can be used to read data written
    * with the second schema, according to the default Avro schema resolution
    * rules.
-   * 
+   *
    * @throws SchemaValidationException
    *           if the first schema cannot read data written by the second.
    */
diff --git a/lang/java/avro/src/main/java/org/apache/avro/ValidateMutualRead.java b/lang/java/avro/src/main/java/org/apache/avro/ValidateMutualRead.java
index 5f8861e..e142b41 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/ValidateMutualRead.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/ValidateMutualRead.java
@@ -27,14 +27,14 @@
  * A {@link SchemaValidationStrategy} that checks that the {@link Schema} to
  * validate and the existing schema can mutually read each other according to
  * the default Avro schema resolution rules.
- * 
+ *
  */
 class ValidateMutualRead implements SchemaValidationStrategy {
 
   /**
    * Validate that the schemas provided can mutually read data written by each
    * other according to the default Avro schema resolution rules.
-   * 
+   *
    * @throws SchemaValidationException if the schemas are not mutually compatible.
    */
   @Override
@@ -47,7 +47,7 @@
   /**
    * Validates that data written with one schema can be read using another,
    * based on the default Avro schema resolution rules.
-   * 
+   *
    * @param writtenWith
    *          The "writer's" schema, representing data to be read.
    * @param readUsing
diff --git a/lang/java/avro/src/main/java/org/apache/avro/data/ErrorBuilder.java b/lang/java/avro/src/main/java/org/apache/avro/data/ErrorBuilder.java
index b55cfd0..e682acd 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/data/ErrorBuilder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/data/ErrorBuilder.java
@@ -19,28 +19,28 @@
 
 /** Interface for error builders */
 public interface ErrorBuilder<T> extends RecordBuilder<T> {
-  
+
   /** Gets the value */
   Object getValue();
-  
+
   /** Sets the value */
   ErrorBuilder<T> setValue(Object value);
-  
+
   /** Checks whether the value has been set */
   boolean hasValue();
-  
+
   /** Clears the value */
   ErrorBuilder<T> clearValue();
-  
+
   /** Gets the error cause */
   Throwable getCause();
-  
+
   /** Sets the error cause */
   ErrorBuilder<T> setCause(Throwable cause);
-  
+
   /** Checks whether the cause has been set */
   boolean hasCause();
-  
+
   /** Clears the cause */
   ErrorBuilder<T> clearCause();
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/data/Json.java b/lang/java/avro/src/main/java/org/apache/avro/data/Json.java
index 73a57c2..daa8482 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/data/Json.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/data/Json.java
@@ -73,7 +73,7 @@
       if (!SCHEMA.equals(schema))
         throw new RuntimeException("Not the Json schema: "+schema);
     }
-    
+
     @Override
     public void write(JsonNode datum, Encoder out) throws IOException {
       Json.write(datum, out);
@@ -173,7 +173,7 @@
 
   /** Note: this enum must be kept aligned with the union in Json.avsc. */
   private enum JsonType { LONG, DOUBLE, STRING, BOOLEAN, NULL, ARRAY, OBJECT }
-  
+
   /**
    * Write Json data as Avro data.
    * @deprecated internal method
diff --git a/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilder.java b/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilder.java
index 8c7a660..a01592c 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilder.java
@@ -22,7 +22,7 @@
 public interface RecordBuilder<T> {
   /**
    * Constructs a new instance using the values set in the RecordBuilder.
-   * If a particular value was not set and the schema defines a default 
+   * If a particular value was not set and the schema defines a default
    * value, the default value will be used.
    * @return a new instance using values set in the RecordBuilder.
    */
diff --git a/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilderBase.java b/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilderBase.java
index ca73b70..8e34a36 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilderBase.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilderBase.java
@@ -28,14 +28,14 @@
 import org.apache.avro.generic.IndexedRecord;
 
 /** Abstract base class for RecordBuilder implementations.  Not thread-safe. */
-public abstract class RecordBuilderBase<T extends IndexedRecord> 
+public abstract class RecordBuilderBase<T extends IndexedRecord>
   implements RecordBuilder<T> {
   private static final Field[] EMPTY_FIELDS = new Field[0];
   private final Schema schema;
   private final Field[] fields;
   private final boolean[] fieldSetFlags;
   private final GenericData data;
-  
+
   protected final Schema schema() { return schema; }
   protected final Field[] fields() { return fields; }
   protected final boolean[] fieldSetFlags() { return fieldSetFlags; }
@@ -51,7 +51,7 @@
     fields = (Field[]) schema.getFields().toArray(EMPTY_FIELDS);
     fieldSetFlags = new boolean[fields.length];
   }
-  
+
   /**
    * RecordBuilderBase copy constructor.
    * Makes a deep copy of the values in the other builder.
@@ -65,17 +65,17 @@
     System.arraycopy(
         other.fieldSetFlags, 0, fieldSetFlags, 0, fieldSetFlags.length);
   }
-  
+
   /**
-   * Validates that a particular value for a given field is valid according to 
+   * Validates that a particular value for a given field is valid according to
    * the following algorithm:
-   * 1. If the value is not null, or the field type is null, or the field type 
+   * 1. If the value is not null, or the field type is null, or the field type
    * is a union which accepts nulls, returns.
    * 2. Else, if the field has a default value, returns.
-   * 3. Otherwise throws AvroRuntimeException. 
+   * 3. Otherwise throws AvroRuntimeException.
    * @param field the field to validate.
    * @param value the value to validate.
-   * @throws NullPointerException if value is null and the given field does 
+   * @throws NullPointerException if value is null and the given field does
    * not accept null values.
    */
   protected void validate(Field field, Object value) {
@@ -92,7 +92,7 @@
   }
 
   /**
-   * Tests whether a value is valid for a specified field. 
+   * Tests whether a value is valid for a specified field.
    * @param f the field for which to test the value.
    * @param value the value to test.
    * @return true if the value is valid for the given field; false otherwise.
@@ -101,10 +101,10 @@
     if (value != null) {
       return true;
     }
-    
+
     Schema schema = f.schema();
     Type type = schema.getType();
-    
+
     // If the type is null, any value is valid
     if (type == Type.NULL) {
       return true;
@@ -118,20 +118,20 @@
         }
       }
     }
-    
+
     // The value is null but the type does not allow nulls
     return false;
   }
-  
+
   /**
    * Gets the default value of the given field, if any.
    * @param field the field whose default value should be retrieved.
-   * @return the default value associated with the given field, 
+   * @return the default value associated with the given field,
    * or null if none is specified in the schema.
-   * @throws IOException 
+   * @throws IOException
    */
   @SuppressWarnings({ "rawtypes", "unchecked" })
-  protected Object defaultValue(Field field) throws IOException {    
+  protected Object defaultValue(Field field) throws IOException {
     return data.deepCopy(field.schema(), data.getDefaultValue(field));
   }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/BZip2Codec.java b/lang/java/avro/src/main/java/org/apache/avro/file/BZip2Codec.java
index 09cf623..8dccfc3 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/BZip2Codec.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/BZip2Codec.java
@@ -69,11 +69,11 @@
       byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
 
       int readCount = -1;
-      
+
       while ( (readCount = inputStream.read(buffer, compressedData.position(), buffer.length))> 0) {
         baos.write(buffer, 0, readCount);
       }
-      
+
       ByteBuffer result = ByteBuffer.wrap(baos.toByteArray());
       return result;
     } finally {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/Codec.java b/lang/java/avro/src/main/java/org/apache/avro/file/Codec.java
index af5e013..c27b4b7 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/Codec.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/Codec.java
@@ -20,7 +20,7 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
-/** 
+/**
  * Interface for Avro-supported compression codecs for data files.
  */
 public abstract class Codec {
@@ -30,14 +30,14 @@
   public abstract ByteBuffer compress(ByteBuffer uncompressedData) throws IOException;
   /** Decompress the data  */
   public abstract ByteBuffer decompress(ByteBuffer compressedData) throws IOException;
-  /** 
+  /**
    * Codecs must implement an equals() method.  Two codecs, A and B are equal
    * if: the result of A and B decompressing content compressed by A is the same
    * AND the retult of A and B decompressing content compressed by B is the same
    **/
   @Override
   public abstract boolean equals(Object other);
-  /** 
+  /**
    * Codecs must implement a hashCode() method that is consistent with equals().*/
   @Override
   public abstract int hashCode();
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/CodecFactory.java b/lang/java/avro/src/main/java/org/apache/avro/file/CodecFactory.java
index 6f25ea2..d178e13 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/CodecFactory.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/CodecFactory.java
@@ -67,11 +67,11 @@
 
   /** Creates internal Codec. */
   protected abstract Codec createInstance();
-  
-  /** Mapping of string names (stored as metas) and codecs. 
+
+  /** Mapping of string names (stored as metas) and codecs.
    * Note that currently options (like compression level)
    * are not recoverable. */
-  private static final Map<String, CodecFactory> REGISTERED = 
+  private static final Map<String, CodecFactory> REGISTERED =
     new HashMap<String, CodecFactory>();
 
   public static final int DEFAULT_DEFLATE_LEVEL = Deflater.DEFAULT_COMPRESSION;
@@ -103,7 +103,7 @@
     }
     return o;
   }
-  
+
 
 
   /** Adds a new codec implementation.  If name already had
@@ -111,11 +111,11 @@
   public static CodecFactory addCodec(String name, CodecFactory c) {
     return REGISTERED.put(name, c);
   }
-  
+
   @Override
   public String toString() {
     Codec instance = this.createInstance();
     return instance.toString();
   }
-  
+
 }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileConstants.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileConstants.java
index 4061962..265ac39 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileConstants.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileConstants.java
@@ -30,7 +30,7 @@
   };
   public static final long FOOTER_BLOCK = -1;
   public static final int SYNC_SIZE = 16;
-  public static final int DEFAULT_SYNC_INTERVAL = 4000*SYNC_SIZE; 
+  public static final int DEFAULT_SYNC_INTERVAL = 4000*SYNC_SIZE;
 
   public static final String SCHEMA = "avro.schema";
   public static final String CODEC = "avro.codec";
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader.java
index be12574..0d5e5c1 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader.java
@@ -59,7 +59,7 @@
       return new DataFileReader<D>(in, reader);
     if (Arrays.equals(DataFileReader12.MAGIC, magic)) // 1.2 format
       return new DataFileReader12<D>(in, reader);
-    
+
     throw new IOException("Not an Avro data file");
   }
 
@@ -166,7 +166,7 @@
     return blockStart;
   }
 
-  /** Return true if past the next synchronization point after a position. */ 
+  /** Return true if past the next synchronization point after a position. */
   @Override
   public boolean pastSync(long position) throws IOException {
     return ((blockStart >= position+SYNC_SIZE)||(blockStart >= sin.length()));
@@ -174,7 +174,7 @@
 
   @Override public long tell() throws IOException { return sin.tell(); }
 
-  static class SeekableInputStream extends InputStream 
+  static class SeekableInputStream extends InputStream
   implements SeekableInput {
     private final byte[] oneByte = new byte[1];
     private SeekableInput in;
@@ -182,7 +182,7 @@
     SeekableInputStream(SeekableInput in) throws IOException {
         this.in = in;
       }
-    
+
     @Override
     public void seek(long p) throws IOException {
       if (p < 0)
@@ -204,7 +204,7 @@
     public int read(byte[] b) throws IOException {
       return in.read(b, 0, b.length);
       }
-    
+
     @Override
     public int read(byte[] b, int off, int len) throws IOException {
       return in.read(b, off, len);
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader12.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader12.java
index 0194de0..54176df 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader12.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader12.java
@@ -39,7 +39,7 @@
   };
   private static final long FOOTER_BLOCK = -1;
   private static final int SYNC_SIZE = 16;
-  private static final int SYNC_INTERVAL = 1000*SYNC_SIZE; 
+  private static final int SYNC_INTERVAL = 1000*SYNC_SIZE;
 
   private static final String SCHEMA = "schema";
   private static final String SYNC = "sync";
@@ -160,8 +160,8 @@
       skipSync();                                 // skip a sync
 
       blockCount = vin.readLong();                // read blockCount
-         
-      if (blockCount == FOOTER_BLOCK) { 
+
+      if (blockCount == FOOTER_BLOCK) {
         seek(vin.readLong()+in.tell());           // skip a footer
       }
     }
@@ -208,7 +208,7 @@
     seek(in.length());
   }
 
-  /** Return true if past the next synchronization point after a position. */ 
+  /** Return true if past the next synchronization point after a position. */
   @Override
   public boolean pastSync(long position) throws IOException {
     return ((blockStart >= position+SYNC_SIZE)||(blockStart >= in.length()));
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileStream.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileStream.java
index 458a7df..bfc53a0 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileStream.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileStream.java
@@ -74,8 +74,8 @@
   byte[] syncBuffer = new byte[DataFileConstants.SYNC_SIZE];
   private Codec codec;
 
-  /** Construct a reader for an input stream.  For file-based input, use 
-   * {@link DataFileReader}.  This will buffer, wrapping with a 
+  /** Construct a reader for an input stream.  For file-based input, use
+   * {@link DataFileReader}.  This will buffer, wrapping with a
    * {@link java.io.BufferedInputStream}
    * is not necessary. */
   public DataFileStream(InputStream in, DatumReader<D> reader)
@@ -90,7 +90,7 @@
   protected DataFileStream(DatumReader<D> reader) throws IOException {
     this.reader = reader;
   }
-  
+
   /** Initialize the stream by reading from its head. */
   void initialize(InputStream in) throws IOException {
     this.header = new Header();
@@ -118,7 +118,7 @@
       } while ((l = vin.mapNext()) != 0);
     }
     vin.readFixed(header.sync);                          // read sync
-    
+
     // finalize the header
     header.metaKeyList = Collections.unmodifiableList(header.metaKeyList);
     header.schema = Schema.parse(getMetaString(DataFileConstants.SCHEMA),false);
@@ -319,22 +319,22 @@
       this.numEntries = numEntries;
       this.blockSize = blockSize;
     }
-    
+
     DataBlock(ByteBuffer block, long numEntries) {
       this.data = block.array();
       this.blockSize = block.remaining();
       this.offset = block.arrayOffset() + block.position();
       this.numEntries = numEntries;
     }
-    
+
     byte[] getData() {
       return data;
     }
-    
+
     long getNumEntries() {
       return numEntries;
     }
-    
+
     int getBlockSize() {
       return blockSize;
     }
@@ -346,23 +346,23 @@
     void setFlushOnWrite(boolean flushOnWrite) {
       this.flushOnWrite = flushOnWrite;
     }
-    
+
     ByteBuffer getAsByteBuffer() {
       return ByteBuffer.wrap(data, offset, blockSize);
     }
-    
+
     void decompressUsing(Codec c) throws IOException {
       ByteBuffer result = c.decompress(getAsByteBuffer());
       data = result.array();
       blockSize = result.remaining();
     }
-    
+
     void compressUsing(Codec c) throws IOException {
       ByteBuffer result = c.compress(getAsByteBuffer());
       data = result.array();
       blockSize = result.remaining();
     }
-    
+
     void writeBlockTo(BinaryEncoder e, byte[] sync) throws IOException {
       e.writeLong(this.numEntries);
       e.writeLong(this.blockSize);
@@ -372,7 +372,7 @@
         e.flush();
       }
     }
-    
+
   }
 }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
index 52fb895..fe916dc 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
@@ -77,16 +77,16 @@
   public DataFileWriter(DatumWriter<D> dout) {
     this.dout = dout;
   }
-  
+
   private void assertOpen() {
     if (!isOpen) throw new AvroRuntimeException("not open");
   }
   private void assertNotOpen() {
     if (isOpen) throw new AvroRuntimeException("already open");
   }
-  
-  /** 
-   * Configures this writer to use the given codec. 
+
+  /**
+   * Configures this writer to use the given codec.
    * May not be reset after writes have begun.
    */
   public DataFileWriter<D> setCodec(CodecFactory c) {
@@ -97,7 +97,7 @@
   }
 
   /**
-   * Set the synchronization interval for this file, in bytes. 
+   * Set the synchronization interval for this file, in bytes.
    * Valid values range from 32 to 2^30
    * Suggested values are between 2K and 2M
    *
@@ -108,12 +108,12 @@
    * called with param set to false, then the block may not be flushed to the
    * stream after the sync marker is written. In this case,
    * the {@linkplain #flush()} must be called to flush the stream.
-   * 
+   *
    * Invalid values throw IllegalArgumentException
-   * 
-   * @param syncInterval 
+   *
+   * @param syncInterval
    *   the approximate number of uncompressed bytes to write in each block
-   * @return 
+   * @return
    *   this DataFileWriter
    */
   public DataFileWriter<D> setSyncInterval(int syncInterval) {
@@ -251,7 +251,7 @@
     meta.put(key, value);
     return this;
   }
-  
+
   private DataFileWriter<D> setMetaInternal(String key, String value) {
     try {
       return setMetaInternal(key, value.getBytes("UTF-8"));
@@ -267,7 +267,7 @@
     }
     return setMetaInternal(key, value);
   }
-  
+
   public static boolean isReservedMeta(String key) {
     return key.startsWith("avro.");
   }
@@ -310,7 +310,7 @@
     blockCount++;
     writeIfBlockFull();
   }
-  
+
   // if there is an error encoding, flush the encoder and then
   // reset the buffer position to contain size bytes, discarding the rest.
   // Otherwise the file will be corrupt with a partial record.
@@ -330,7 +330,7 @@
     blockCount++;
     writeIfBlockFull();
   }
-  
+
   private int bufferInUse() {
     return (buffer.size() + bufOut.bytesBuffered());
   }
@@ -384,7 +384,7 @@
       }
     }
   }
-  
+
   private void writeBlock() throws IOException {
     if (blockCount > 0) {
       bufOut.flush();
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DeflateCodec.java b/lang/java/avro/src/main/java/org/apache/avro/file/DeflateCodec.java
index 2a0c8c5..f8f6ac4 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/DeflateCodec.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DeflateCodec.java
@@ -26,8 +26,8 @@
 import java.util.zip.Inflater;
 import java.util.zip.InflaterOutputStream;
 
-/** 
- * Implements DEFLATE (RFC1951) compression and decompression. 
+/**
+ * Implements DEFLATE (RFC1951) compression and decompression.
  *
  * Note that there is a distinction between RFC1951 (deflate)
  * and RFC1950 (zlib).  zlib adds an extra 2-byte header
@@ -37,7 +37,7 @@
  * RFC1951.
  */
 class DeflateCodec extends Codec {
-  
+
   static class Option extends CodecFactory {
     private int compressionLevel;
 
@@ -55,7 +55,7 @@
   private Deflater deflater;
   private Inflater inflater;
   //currently only do 'nowrap' -- RFC 1951, not zlib
-  private boolean nowrap = true; 
+  private boolean nowrap = true;
   private int compressionLevel;
 
   public DeflateCodec(int compressionLevel) {
@@ -84,7 +84,7 @@
     ByteBuffer result = ByteBuffer.wrap(baos.toByteArray());
     return result;
   }
-  
+
   private void writeAndClose(ByteBuffer data, OutputStream to) throws IOException {
     byte[] input = data.array();
     int offset = data.arrayOffset() + data.position();
@@ -95,7 +95,7 @@
       to.close();
     }
   }
-  
+
   // get and initialize the inflater for use.
   private Inflater getInflater() {
     if (null == inflater) {
@@ -113,7 +113,7 @@
     deflater.reset();
     return deflater;
   }
-  
+
   // get and initialize the output buffer for use.
   private ByteArrayOutputStream getOutputBuffer(int suggestedLength) {
     if (null == outputBuffer) {
@@ -122,7 +122,7 @@
     outputBuffer.reset();
     return outputBuffer;
   }
-  
+
   @Override
   public int hashCode() {
     return nowrap ? 0 : 1;
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/FileReader.java b/lang/java/avro/src/main/java/org/apache/avro/file/FileReader.java
index 19de11c..68c0102 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/FileReader.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/FileReader.java
@@ -40,7 +40,7 @@
    * #next()}. */
   void sync(long position) throws IOException;
 
-  /** Return true if past the next synchronization point after a position. */ 
+  /** Return true if past the next synchronization point after a position. */
   boolean pastSync(long position) throws IOException;
 
   /** Return the current position in the input. */
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/NullCodec.java b/lang/java/avro/src/main/java/org/apache/avro/file/NullCodec.java
index fd82d9b..e95f699 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/NullCodec.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/NullCodec.java
@@ -22,7 +22,7 @@
 
 /** Implements "null" (pass through) codec. */
 final class NullCodec extends Codec {
-  
+
   private static final NullCodec INSTANCE = new NullCodec();
 
   static class Option extends CodecFactory {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java b/lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java
index 0787050..1a5d252 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java
@@ -60,15 +60,15 @@
     int size = Snappy.uncompress(in.array(),in.position(),in.remaining()-4,
                                  out.array(), 0);
     out.limit(size);
-    
+
     crc32.reset();
     crc32.update(out.array(), 0, size);
     if (in.getInt(in.limit()-4) != (int)crc32.getValue())
       throw new IOException("Checksum failure");
-    
+
     return out;
   }
-  
+
   @Override public int hashCode() { return getName().hashCode(); }
 
   @Override
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericArray.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericArray.java
index 40b7e0f..e69e4ef 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericArray.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericArray.java
@@ -26,7 +26,7 @@
    * store an element, if any.  This permits reuse of arrays and their elements
    * without allocating new objects. */
   T peek();
-  
+
   /** Reverses the order of the elements in this array. */
   void reverse();
 }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java
index 2b01de4..09f4c5a 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java
@@ -60,7 +60,7 @@
 public class GenericData {
 
   private static final GenericData INSTANCE = new GenericData();
-  
+
   /** Used to specify the Java type for a string schema. */
   public enum StringType { CharSequence, String, Utf8 };
 
@@ -322,12 +322,12 @@
     public void reverse() {
       int left = 0;
       int right = elements.length - 1;
-      
+
       while (left < right) {
         Object tmp = elements[left];
         elements[left] = elements[right];
         elements[right] = tmp;
-        
+
         left++;
         right--;
       }
@@ -526,7 +526,7 @@
         toString(element, buffer);
         if (i++ < last)
           buffer.append(", ");
-      }        
+      }
       buffer.append("]");
     } else if (isMap(datum)) {
       buffer.append("{");
@@ -561,7 +561,7 @@
       buffer.append(datum);
     }
   }
-  
+
   /* Adapted from http://code.google.com/p/json-simple */
   private void writeEscapedString(CharSequence string, StringBuilder builder) {
     for(int i = 0; i < string.length(); i++){
@@ -658,7 +658,7 @@
   public void setField(Object record, String name, int position, Object o) {
     ((IndexedRecord)record).put(position, o);
   }
-  
+
   /** Called by {@link GenericDatumReader#readRecord} to retrieve a record
    * field value from a reused instance.  The default implementation is for
    * {@link IndexedRecord}.*/
@@ -675,7 +675,7 @@
   protected void setField(Object r, String n, int p, Object o, Object state) {
     setField(r, n, p, o);
   }
-  
+
   /** Version of {@link #getField} that has state. */
   protected Object getField(Object record, String name, int pos, Object state) {
     return getField(record, name, pos);
@@ -798,7 +798,7 @@
   protected boolean isEnum(Object datum) {
     return datum instanceof GenericEnumSymbol;
   }
-  
+
   /** Called to obtain the schema of a enum.  By default calls
    * {GenericContainer#getSchema().  May be overridden for alternate enum
    * representations. */
@@ -810,7 +810,7 @@
   protected boolean isMap(Object datum) {
     return datum instanceof Map;
   }
-  
+
   /** Called by the default implementation of {@link #instanceOf}.*/
   protected boolean isFixed(Object datum) {
     return datum instanceof GenericFixed;
@@ -867,7 +867,7 @@
   protected boolean isBoolean(Object datum) {
     return datum instanceof Boolean;
   }
-   
+
 
   /** Compute a hash code according to a schema, consistent with {@link
    * #compare(Object,Object,Schema)}. */
@@ -974,11 +974,11 @@
   /**
    * Gets the default value of the given field, if any.
    * @param field the field whose default value should be retrieved.
-   * @return the default value associated with the given field, 
+   * @return the default value associated with the given field,
    * or null if none is specified in the schema.
    */
   @SuppressWarnings({ "rawtypes", "unchecked" })
-  public Object getDefaultValue(Field field) {    
+  public Object getDefaultValue(Field field) {
     JsonNode json = field.defaultValue();
     if (json == null)
       throw new AvroRuntimeException("Field " + field
@@ -989,10 +989,10 @@
                 && field.schema().getTypes().get(0).getType() == Type.NULL))) {
       return null;
     }
-    
+
     // Check the cache
     Object defaultValue = defaultValueCache.get(field);
-    
+
     // If not cached, get the default Java value by encoding the default JSON
     // value and then decoding it:
     if (defaultValue == null)
@@ -1060,7 +1060,7 @@
         return value; // immutable
       case MAP:
         Map<CharSequence, Object> mapValue = (Map) value;
-        Map<CharSequence, Object> mapCopy = 
+        Map<CharSequence, Object> mapCopy =
           new HashMap<CharSequence, Object>(mapValue.size());
         for (Map.Entry<CharSequence, Object> entry : mapValue.entrySet()) {
           mapCopy.put((CharSequence)(deepCopy(STRINGS, entry.getKey())),
@@ -1086,11 +1086,11 @@
         if (value instanceof String) {
           return (T)value;
         }
-        
-        // Some CharSequence subclasses are mutable, so we still need to make 
+
+        // Some CharSequence subclasses are mutable, so we still need to make
         // a copy
         else if (value instanceof Utf8) {
-          // Utf8 copy constructor is more efficient than converting 
+          // Utf8 copy constructor is more efficient than converting
           // to string and then back to Utf8
           return (T)new Utf8((Utf8)value);
         }
@@ -1104,7 +1104,7 @@
             value + "\"");
     }
   }
-  
+
   /** Called to create an fixed value. May be overridden for alternate fixed
    * representations.  By default, returns {@link GenericFixed}. */
   public Object createFixed(Object old, Schema schema) {
@@ -1113,7 +1113,7 @@
       return old;
     return new GenericData.Fixed(schema);
   }
-  
+
   /** Called to create an fixed value. May be overridden for alternate fixed
    * representations.  By default, returns {@link GenericFixed}. */
   public Object createFixed(Object old, byte[] bytes, Schema schema) {
@@ -1121,7 +1121,7 @@
     System.arraycopy(bytes, 0, fixed.bytes(), 0, schema.getFixedSize());
     return fixed;
   }
-  
+
   /** Called to create an enum value. May be overridden for alternate enum
    * representations.  By default, returns a GenericEnumSymbol. */
   public Object createEnum(String symbol, Schema schema) {
@@ -1144,5 +1144,5 @@
     }
     return new GenericData.Record(schema);
   }
-  
+
 }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumReader.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumReader.java
index 9417b22..6133d6b 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumReader.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumReader.java
@@ -43,7 +43,7 @@
   private final GenericData data;
   private Schema actual;
   private Schema expected;
-  
+
   private ResolvingDecoder creatorResolver = null;
   private final Thread creator;
 
@@ -115,7 +115,7 @@
     ResolvingDecoder resolver;
     if (currThread == creator && creatorResolver != null) {
       return creatorResolver;
-    } 
+    }
 
     Map<Schema,ResolvingDecoder> cache = RESOLVER_CACHE.get().get(actual);
     if (cache == null) {
@@ -128,7 +128,7 @@
           Schema.applyAliases(actual, expected), expected, null);
       cache.put(expected, resolver);
     }
-    
+
     if (currThread == creator){
       creatorResolver = resolver;
     }
@@ -145,7 +145,7 @@
     resolver.drain();
     return result;
   }
-  
+
   /** Called to read data.*/
   protected Object read(Object old, Schema expected,
       ResolvingDecoder in) throws IOException {
@@ -215,11 +215,11 @@
 
   /** Called to read a record instance. May be overridden for alternate record
    * representations.*/
-  protected Object readRecord(Object old, Schema expected, 
+  protected Object readRecord(Object old, Schema expected,
       ResolvingDecoder in) throws IOException {
     Object r = data.newRecord(old, expected);
     Object state = data.getRecordState(r, expected);
-    
+
     for (Field f : in.readFieldOrder()) {
       int pos = f.pos();
       String name = f.name();
@@ -232,14 +232,14 @@
 
     return r;
   }
-  
-  /** Called to read a single field of a record. May be overridden for more 
+
+  /** Called to read a single field of a record. May be overridden for more
    * efficient or alternate implementations.*/
   protected void readField(Object r, Field f, Object oldDatum,
     ResolvingDecoder in, Object state) throws IOException {
     data.setField(r, f.name(), f.pos(), read(oldDatum, f.schema(), in), state);
   }
-  
+
   /** Called to read an enum value. May be overridden for alternate enum
    * representations.  By default, returns a GenericEnumSymbol. */
   protected Object readEnum(Schema expected, Decoder in) throws IOException {
@@ -299,7 +299,7 @@
   protected void addToArray(Object array, long pos, Object e) {
     ((Collection) array).add(e);
   }
-  
+
   /** Called to read a map instance.  May be overridden for alternate map
    * representations.*/
   protected Object readMap(Object old, Schema expected,
@@ -341,7 +341,7 @@
   protected void addToMap(Object map, Object key, Object value) {
     ((Map) map).put(key, value);
   }
-  
+
   /** Called to read a fixed value. May be overridden for alternate fixed
    * representations.  By default, returns {@link GenericFixed}. */
   protected Object readFixed(Object old, Schema expected, Decoder in)
@@ -350,11 +350,11 @@
     in.readFixed(fixed.bytes(), 0, expected.getFixedSize());
     return fixed;
   }
-  
-  /** 
+
+  /**
    * Called to create an fixed value. May be overridden for alternate fixed
    * representations.  By default, returns {@link GenericFixed}.
-   * @deprecated As of Avro 1.6.0 this method has been moved to 
+   * @deprecated As of Avro 1.6.0 this method has been moved to
    * {@link GenericData#createFixed(Object, Schema)}
    */
   @Deprecated
@@ -362,17 +362,17 @@
     return data.createFixed(old, schema);
   }
 
-  /** 
+  /**
    * Called to create an fixed value. May be overridden for alternate fixed
    * representations.  By default, returns {@link GenericFixed}.
-   * @deprecated As of Avro 1.6.0 this method has been moved to 
+   * @deprecated As of Avro 1.6.0 this method has been moved to
    * {@link GenericData#createFixed(Object, byte[], Schema)}
    */
   @Deprecated
   protected Object createFixed(Object old, byte[] bytes, Schema schema) {
     return data.createFixed(old, bytes, schema);
   }
-  
+
   /**
    * Called to create new record instances. Subclasses may override to use a
    * different record implementation. The returned instance must conform to the
@@ -380,7 +380,7 @@
    * schema, they should either be removed from the old object, or it should
    * create a new instance that conforms to the schema. By default, this returns
    * a {@link GenericData.Record}.
-   * @deprecated As of Avro 1.6.0 this method has been moved to 
+   * @deprecated As of Avro 1.6.0 this method has been moved to
    * {@link GenericData#newRecord(Object, Schema)}
    */
   @Deprecated
@@ -421,7 +421,7 @@
     if (stringClass == CharSequence.class)
       return readString(old, in);
     return newInstanceFromString(stringClass, in.readString());
-  }                  
+  }
 
   /** Called to read strings.  Subclasses may override to use a different
    * string representation.  By default, this calls {@link
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumWriter.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumWriter.java
index 7cfa022..63fa025 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumWriter.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumWriter.java
@@ -143,10 +143,10 @@
       writeField(datum, f, out, state);
     }
   }
-  
-  /** Called to write a single field of a record. May be overridden for more 
+
+  /** Called to write a single field of a record. May be overridden for more
    * efficient or alternate implementations.*/
-  protected void writeField(Object datum, Field f, Encoder out, Object state) 
+  protected void writeField(Object datum, Field f, Encoder out, Object state)
       throws IOException {
     Object value = data.getField(datum, f.name(), f.pos(), state);
     try {
@@ -155,7 +155,7 @@
       throw npe(e, " in field " + f.name());
     }
   }
-  
+
   /** Called to write an enum value.  May be overridden for alternate enum
    * representations.*/
   protected void writeEnum(Schema schema, Object datum, Encoder out)
@@ -164,7 +164,7 @@
       throw new AvroTypeException("Not an enum: "+datum);
     out.writeEnum(schema.getEnumOrdinal(datum.toString()));
   }
-  
+
   /** Called to write a array.  May be overridden for alternate array
    * representations.*/
   protected void writeArray(Schema schema, Object datum, Encoder out)
@@ -205,7 +205,7 @@
   protected Iterator<? extends Object> getArrayElements(Object array) {
     return ((Collection) array).iterator();
   }
-  
+
   /** Called to write a map.  May be overridden for alternate map
    * representations.*/
   protected void writeMap(Schema schema, Object datum, Encoder out)
@@ -241,7 +241,7 @@
   protected Iterable<Map.Entry<Object,Object>> getMapEntries(Object map) {
     return ((Map) map).entrySet();
   }
-  
+
   /** Called to write a string.  May be overridden for alternate string
    * representations.*/
   protected void writeString(Schema schema, Object datum, Encoder out)
@@ -266,7 +266,7 @@
     throws IOException {
     out.writeFixed(((GenericFixed)datum).bytes(), 0, schema.getFixedSize());
   }
-  
+
   private void error(Schema schema, Object datum) {
     throw new AvroTypeException("Not a "+schema+": "+datum);
   }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericRecordBuilder.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericRecordBuilder.java
index 2137104..22a49f5 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericRecordBuilder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericRecordBuilder.java
@@ -29,7 +29,7 @@
  * for fields if they are not specified.  */
 public class GenericRecordBuilder extends RecordBuilderBase<Record> {
   private final GenericData.Record record;
-  
+
   /**
    * Creates a GenericRecordBuilder for building Record instances.
    * @param schema the schema associated with the record class.
@@ -38,7 +38,7 @@
     super(schema, GenericData.get());
     record = new GenericData.Record(schema);
   }
-  
+
   /**
    * Creates a GenericRecordBuilder by copying an existing GenericRecordBuilder.
    * @param other the GenericRecordBuilder to copy.
@@ -47,7 +47,7 @@
     super(other, GenericData.get());
     record = new GenericData.Record(other.record, /* deepCopy = */ true);
   }
-  
+
   /**
    * Creates a GenericRecordBuilder by copying an existing record instance.
    * @param other the record instance to copy.
@@ -55,18 +55,18 @@
   public GenericRecordBuilder(Record other) {
     super(other.getSchema(), GenericData.get());
     record = new GenericData.Record(other, /* deepCopy = */ true);
-    
+
     // Set all fields in the RecordBuilder that are set in the record
     for (Field f : schema().getFields()) {
       Object value = other.get(f.pos());
-      // Only set the value if it is not null, if the schema type is null, 
+      // Only set the value if it is not null, if the schema type is null,
       // or if the schema type is a union that accepts nulls.
       if (isValidValue(f, value)) {
         set(f, data().deepCopy(f.schema(), value));
       }
     }
   }
-  
+
   /**
    * Gets the value of a field.
    * @param fieldName the name of the field to get.
@@ -75,7 +75,7 @@
   public Object get(String fieldName) {
     return get(schema().getField(fieldName));
   }
-  
+
   /**
    * Gets the value of a field.
    * @param field the field to get.
@@ -84,7 +84,7 @@
   public Object get(Field field) {
     return get(field.pos());
   }
-  
+
   /**
    * Gets the value of a field.
    * @param pos the position of the field to get.
@@ -93,7 +93,7 @@
   protected Object get(int pos) {
     return record.get(pos);
   }
-  
+
   /**
    * Sets the value of a field.
    * @param fieldName the name of the field to set.
@@ -103,7 +103,7 @@
   public GenericRecordBuilder set(String fieldName, Object value) {
     return set(schema().getField(fieldName), value);
   }
-  
+
   /**
    * Sets the value of a field.
    * @param field the field to set.
@@ -113,7 +113,7 @@
   public GenericRecordBuilder set(Field field, Object value) {
     return set(field, field.pos(), value);
   }
-  
+
   /**
    * Sets the value of a field.
    * @param pos the field to set.
@@ -123,7 +123,7 @@
   protected GenericRecordBuilder set(int pos, Object value) {
     return set(fields()[pos], pos, value);
   }
-  
+
   /**
    * Sets the value of a field.
    * @param field the field to set.
@@ -137,7 +137,7 @@
     fieldSetFlags()[pos] = true;
     return this;
   }
-  
+
   /**
    * Checks whether a field has been set.
    * @param fieldName the name of the field to check.
@@ -146,7 +146,7 @@
   public boolean has(String fieldName) {
     return has(schema().getField(fieldName));
   }
-  
+
   /**
    * Checks whether a field has been set.
    * @param field the field to check.
@@ -155,7 +155,7 @@
   public boolean has(Field field) {
     return has(field.pos());
   }
-  
+
   /**
    * Checks whether a field has been set.
    * @param pos the position of the field to check.
@@ -164,7 +164,7 @@
   protected boolean has(int pos) {
     return fieldSetFlags()[pos];
   }
-  
+
   /**
    * Clears the value of the given field.
    * @param fieldName the name of the field to clear.
@@ -173,7 +173,7 @@
   public GenericRecordBuilder clear(String fieldName) {
     return clear(schema().getField(fieldName));
   }
-  
+
   /**
    * Clears the value of the given field.
    * @param field the field to clear.
@@ -182,7 +182,7 @@
   public GenericRecordBuilder clear(Field field) {
     return clear(field.pos());
   }
-  
+
   /**
    * Clears the value of the given field.
    * @param pos the position of the field to clear.
@@ -193,7 +193,7 @@
     fieldSetFlags()[pos] = false;
     return this;
   }
-  
+
   @Override
   public Record build() {
     Record record;
@@ -202,7 +202,7 @@
     } catch (Exception e) {
       throw new AvroRuntimeException(e);
     }
-    
+
     for (Field field : fields()) {
       Object value;
       try {
@@ -214,22 +214,22 @@
         record.put(field.pos(), value);
       }
     }
-    
+
     return record;
   }
-  
+
   /**
    * Gets the value of the given field.
    * If the field has been set, the set value is returned (even if it's null).
-   * If the field hasn't been set and has a default value, the default value 
+   * If the field hasn't been set and has a default value, the default value
    * is returned.
    * @param field the field whose value should be retrieved.
-   * @return the value set for the given field, the field's default value, 
+   * @return the value set for the given field, the field's default value,
    * or null.
    * @throws IOException
    */
   private Object getWithDefault(Field field) throws IOException {
-    return fieldSetFlags()[field.pos()] ? 
+    return fieldSetFlags()[field.pos()] ?
         record.get(field.pos()) : defaultValue(field);
   }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/BinaryData.java b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryData.java
index 18cbf8a..3d4f79c 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/BinaryData.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryData.java
@@ -35,7 +35,7 @@
        this.d1 = new BinaryDecoder(new byte[0], 0, 0);
        this.d2 = new BinaryDecoder(new byte[0], 0, 0);
     }
-    public void set(byte[] data1, int off1, int len1, 
+    public void set(byte[] data1, int off1, int len1,
                     byte[] data2, int off2, int len2) {
       d1.setBuf(data1, off1, len1);
       d2.setBuf(data2, off2, len2);
@@ -283,7 +283,7 @@
     byte[] bytes = data.decoder.getBuf();
     int start = data.decoder.getPos();
     int end = start+len;
-    if (rev) 
+    if (rev)
       for (int i = end-1; i >= start; i--)
         hashCode = hashCode*31 + bytes[i];
     else
@@ -333,7 +333,7 @@
           }
         }
       }
-    } 
+    }
     buf[pos++] = (byte) n;
     return pos - start;
   }
@@ -395,7 +395,7 @@
   public static int encodeFloat(float f, byte[] buf, int pos) {
     int len = 1;
     int bits = Float.floatToRawIntBits(f);
-    // hotspot compiler works well with this variant 
+    // hotspot compiler works well with this variant
     buf[pos]         = (byte)((bits       ) & 0xFF);
     buf[pos + len++] = (byte)((bits >>>  8) & 0xFF);
     buf[pos + len++] = (byte)((bits >>> 16) & 0xFF);
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/BinaryDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryDecoder.java
index 7877002..3711d2b 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/BinaryDecoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryDecoder.java
@@ -33,7 +33,7 @@
  * required to serve its read methods.
  * The number of unused bytes in the buffer can be accessed by
  * inputStream().remaining(), if the BinaryDecoder is not 'direct'.
- * 
+ *
  * @see Encoder
  */
 
@@ -191,7 +191,7 @@
     }
     return (l >>> 1) ^ -(l & 1); // back to two's-complement
   }
-  
+
   // splitting readLong up makes it faster because of the JVM does more
   // optimizations on small methods
   private long innerLongDecode(long l) throws IOException {
@@ -264,7 +264,7 @@
     }
     return result;
   }
-  
+
   private final Utf8 scratchUtf8 = new Utf8();
 
   @Override
@@ -325,7 +325,7 @@
 
   /**
    * Reads <tt>length</tt> bytes into <tt>bytes</tt> starting at <tt>start</tt>.
-   * 
+   *
    * @throws EOFException
    *           If there are not enough number of bytes in the source.
    * @throws IOException
@@ -354,7 +354,7 @@
    * Returns the number of items to follow in the current array or map. Returns
    * 0 if there are no more items in the current array and the array/map has
    * ended.
-   * 
+   *
    * @throws IOException
    */
   protected long doReadItemCount() throws IOException {
@@ -372,7 +372,7 @@
    * more items left in the array or map. If items cannot be skipped (because
    * byte count to skip is not found in the stream) return the count of the
    * items found. The client needs to skip the items individually.
-   * 
+   *
    * @return Zero if there are no more items to skip and end of array/map is
    *         reached. Positive number if some items are found that cannot be
    *         skipped and the client needs to skip them individually.
@@ -476,10 +476,10 @@
 
   /**
    * Returns an {@link java.io.InputStream} that is aware of any buffering that
-   * may occur in this BinaryDecoder. Readers that need to interleave decoding 
+   * may occur in this BinaryDecoder. Readers that need to interleave decoding
    * Avro data with other reads must access this InputStream to do so unless
    * the implementation is 'direct' and does not read beyond the minimum bytes
-   * necessary from the source.  
+   * necessary from the source.
    */
   public InputStream inputStream() {
     return source;
@@ -624,7 +624,7 @@
     /**
      * Skips length bytes from the source. If length bytes cannot be skipped due
      * to end of file/stream/channel/etc an EOFException is thrown
-     * 
+     *
      * @param length
      *          the number of bytes to attempt to skip
      * @throws IOException
@@ -639,7 +639,7 @@
      * actual number of bytes skipped. This method must attempt to skip as many
      * bytes as possible up to <i>skipLength</i> bytes. Skipping 0 bytes signals
      * end of stream/channel/file/etc
-     * 
+     *
      * @param skipLength
      *          the number of bytes to attempt to skip
      * @return the count of actual bytes skipped.
@@ -650,7 +650,7 @@
      * Reads raw from the source, into a byte[]. Used for reads that are larger
      * than the buffer, or otherwise unbuffered. This is a mandatory read -- if
      * there is not enough bytes in the source, EOFException is thrown.
-     * 
+     *
      * @throws IOException
      *           if an error occurs
      * @throws EOFException
@@ -666,7 +666,7 @@
      * <p/>
      * This method must attempt to read as much as possible from the source.
      * Returns 0 when at the end of stream/channel/file/etc.
-     * 
+     *
      * @throws IOException
      *           if an error occurs reading
      **/
@@ -677,9 +677,9 @@
      * If this source buffers, compacts the buffer by placing the
      * <i>remaining</i> bytes starting at <i>pos</i> at <i>minPos</i>. This may
      * be done in the current buffer, or may replace the buffer with a new one.
-     * 
+     *
      * The end result must be a buffer with at least 16 bytes of remaining space.
-     * 
+     *
      * @param pos
      * @param minPos
      * @param remaining
@@ -876,7 +876,7 @@
   /**
    * This byte source is special. It will avoid copying data by using the
    * source's byte[] as a buffer in the decoder.
-   * 
+   *
    */
   private static class ByteArrayByteSource extends ByteSource {
     private byte[] data;
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/BinaryEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryEncoder.java
index 7be0390..9e21a65 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/BinaryEncoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryEncoder.java
@@ -26,7 +26,7 @@
  * An abstract {@link Encoder} for Avro's binary encoding.
  * <p/>
  * To construct and configure instances, use {@link EncoderFactory}
- * 
+ *
  * @see EncoderFactory
  * @see BufferedBinaryEncoder
  * @see DirectBinaryEncoder
@@ -35,15 +35,15 @@
  * @see Decoder
  */
 public abstract class BinaryEncoder extends Encoder {
-  
+
   @Override
   public void writeNull() throws IOException {}
-  
+
   @Override
   public void writeString(Utf8 utf8) throws IOException {
     this.writeBytes(utf8.getBytes(), 0, utf8.getByteLength());
   }
-  
+
   @Override
   public void writeString(String string) throws IOException {
     if (0 == string.length()) {
@@ -65,7 +65,7 @@
       writeFixed(bytes);
     }
   }
-  
+
   @Override
   public void writeBytes(byte[] bytes, int start, int len) throws IOException {
     if (0 == len) {
@@ -75,7 +75,7 @@
     this.writeInt(len);
     this.writeFixed(bytes, start, len);
   }
-  
+
   @Override
   public void writeEnum(int e) throws IOException {
     this.writeInt(e);
@@ -90,7 +90,7 @@
       this.writeLong(itemCount);
     }
   }
-  
+
   @Override
   public void startItem() throws IOException {}
 
@@ -111,10 +111,10 @@
   public void writeIndex(int unionIndex) throws IOException {
     writeInt(unionIndex);
   }
-  
+
   /** Write a zero byte to the underlying output. **/
   protected abstract void writeZero() throws IOException;
-  
+
   /**
    * Returns the number of bytes currently buffered by this encoder. If this
    * Encoder does not buffer, this will always return zero.
@@ -122,6 +122,6 @@
    * Call {@link #flush()} to empty the buffer to the underlying output.
    */
   public abstract int bytesBuffered();
-  
+
 }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/BlockingBinaryEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/BlockingBinaryEncoder.java
index e8b6c33..96a055d 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/BlockingBinaryEncoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/BlockingBinaryEncoder.java
@@ -37,7 +37,7 @@
  * {@link #flush()} is called.
  * <p/>
  * BlockingBinaryEncoder is not thread-safe
- * 
+ *
  * @see BinaryEncoder
  * @see EncoderFactory
  * @see Encoder
@@ -100,7 +100,7 @@
 
     /** The state of this BlockedValue */
     public State state;
-    
+
     /** The location in the buffer where this blocked value starts */
     public int start;
 
@@ -109,7 +109,7 @@
      * is the first item, this is same as {@link #start}.
      */
     public int lastFullItem;
-    
+
     /**
      * Number of items in this blocked value that are stored
      * in the buffer.
@@ -126,7 +126,7 @@
       this.start = this.lastFullItem = 0;
       this.items = 1; // Makes various assertions work out
     }
-    
+
     /**
      * Check invariants of <code>this</code> and also the
      * <code>BlockedValue</code> containing <code>this</code>.
@@ -167,12 +167,12 @@
    * stream.
    */
   private byte[] buf;
-  
+
   /**
    * Index into the location in {@link #buf}, where next byte can be written.
    */
   private int pos;
-  
+
   /**
    * The state stack.
    */
@@ -232,11 +232,11 @@
     if (null == buf || buf.length != blockBufferSize) {
       buf = new byte[blockBufferSize];
     }
-    
+
     assert check();
     return this;
   }
-  
+
   @Override
   public void flush() throws IOException {
       BlockedValue bv = blockStack[stackTop];
@@ -270,7 +270,7 @@
     ensureBounds(10);
     pos += BinaryData.encodeLong(n, buf, pos);
   }
-    
+
   @Override
   public void writeFloat(float f) throws IOException {
     ensureBounds(4);
@@ -287,7 +287,7 @@
   public void writeFixed(byte[] bytes, int start, int len) throws IOException {
     doWriteBytes(bytes, start, len);
   }
-  
+
   @Override
   protected void writeZero() throws IOException {
     ensureBounds(1);
@@ -318,7 +318,7 @@
 
     assert check();
   }
-  
+
   @Override
   public void startItem() throws IOException {
     if (blockStack[stackTop].state == BlockedValue.State.OVERFLOW) {
@@ -371,7 +371,7 @@
       throw new AvroTypeException("Failed to read write expected number of array elements.");
     }
     endBlockedValue();
-    
+
     assert check();
   }
 
@@ -385,7 +385,7 @@
   public int bytesBuffered() {
     return pos + super.bytesBuffered();
   }
-  
+
   private void endBlockedValue() throws IOException {
     for (; ;) {
       assert check();
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/BufferedBinaryEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/BufferedBinaryEncoder.java
index cb0758c..82a36f9 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/BufferedBinaryEncoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/BufferedBinaryEncoder.java
@@ -38,7 +38,7 @@
  * To construct or reconfigure, use
  * {@link EncoderFactory#binaryEncoder(OutputStream, BinaryEncoder)}.
  * <p/>
- * To change the buffer size, configure the factory instance used to 
+ * To change the buffer size, configure the factory instance used to
  * create instances with {@link EncoderFactory#configureBufferSize(int)}
  *  @see Encoder
  *  @see EncoderFactory
@@ -54,7 +54,7 @@
   BufferedBinaryEncoder(OutputStream out, int bufferSize) {
     configure(out, bufferSize);
   }
-  
+
   BufferedBinaryEncoder configure(OutputStream out, int bufferSize) {
     if (null == out)
       throw new NullPointerException("OutputStream cannot be null!");
@@ -85,7 +85,7 @@
     sink.innerFlush();
   }
 
-  /** Flushes the internal buffer to the underlying output. 
+  /** Flushes the internal buffer to the underlying output.
    * Does not flush the underlying output.
    */
   private void flushBuffer() throws IOException {
@@ -154,7 +154,7 @@
     System.arraycopy(bytes, start, buf, pos, len);
     pos += len;
   }
-  
+
   @Override
   public void writeFixed(ByteBuffer bytes) throws IOException {
     if (!bytes.hasArray() && bytes.remaining() > bulkLimit) {
@@ -169,7 +169,7 @@
   protected void writeZero() throws IOException {
     writeByte(0);
   }
-  
+
   private void writeByte(int b) throws IOException {
     if (pos == buf.length) {
       flushBuffer();
@@ -195,13 +195,13 @@
     protected ByteSink() {}
     /** Write data from bytes, starting at off, for len bytes **/
     protected abstract void innerWrite(byte[] bytes, int off, int len) throws IOException;
-    
+
     protected abstract void innerWrite(ByteBuffer buff) throws IOException;
-    
+
     /** Flush the underlying output, if supported **/
     protected abstract void innerFlush() throws IOException;
   }
-  
+
   static class OutputStreamSink extends ByteSink {
     private final OutputStream out;
     private final WritableByteChannel channel;
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/Decoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/Decoder.java
index e924de5..46d3e9e 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/Decoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/Decoder.java
@@ -88,14 +88,14 @@
    *           is not the type of the next value to be read
    */
   public abstract double readDouble() throws IOException;
-    
+
   /**
    * Reads a char-string written by {@link Encoder#writeString}.
    * @throws AvroTypeException If this is a stateful reader and
    * char-string is not the type of the next value to be read
    */
   public abstract Utf8 readString(Utf8 old) throws IOException;
-    
+
   /**
    * Reads a char-string written by {@link Encoder#writeString}.
    * @throws AvroTypeException If this is a stateful reader and
@@ -125,7 +125,7 @@
    *          byte-string is not the type of the next value to be read
    */
   public abstract void skipBytes() throws IOException;
-  
+
   /**
    * Reads fixed sized binary object.
    * @param bytes The buffer to store the contents being read.
@@ -149,7 +149,7 @@
   public void readFixed(byte[] bytes) throws IOException {
     readFixed(bytes, 0, bytes.length);
   }
-  
+
   /**
    * Discards fixed sized binary object.
    * @param length  The size of the binary object to be skipped.
@@ -168,7 +168,7 @@
    * @throws IOException
    */
   public abstract int readEnum() throws IOException;
-  
+
   /**
    * Reads and returns the size of the first block of an array.  If
    * this method returns non-zero, then the caller should read the
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/DecoderFactory.java b/lang/java/avro/src/main/java/org/apache/avro/io/DecoderFactory.java
index c777614..2e874ad 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/DecoderFactory.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/DecoderFactory.java
@@ -28,7 +28,7 @@
  * Factories are thread-safe, and are generally cached by applications for
  * performance reasons. Multiple instances are only required if multiple
  * concurrent configurations are needed.
- * 
+ *
  * @see Decoder
  */
 
@@ -50,7 +50,7 @@
   public static DecoderFactory defaultFactory() {
     return get();
   }
-  
+
   /**
    * Returns an immutable static DecoderFactory configured with default settings
    * All mutating methods throw IllegalArgumentExceptions. All creator methods
@@ -64,7 +64,7 @@
    * Configures this factory to use the specified buffer size when creating
    * Decoder instances that buffer their input. The default buffer size is
    * 8192 bytes.
-   * 
+   *
    * @param size The preferred buffer size. Valid values are in the range [32,
    *          16*1024*1024]. Values outside this range are rounded to the nearest
    *          value in the range. Values less than 512 or greater than 1024*1024
@@ -82,7 +82,7 @@
     this.binaryDecoderBufferSize = size;
     return this;
   }
-  
+
   /**
    * Returns this factory's configured preferred buffer size.  Used when creating
    * Decoder instances that buffer. See {@link #configureDecoderBufferSize}
@@ -91,14 +91,14 @@
   public int getConfiguredBufferSize() {
     return this.binaryDecoderBufferSize;
   }
-  
+
   /** @deprecated use the equivalent
    *  {@link #binaryDecoder(InputStream, BinaryDecoder)} instead */
   @Deprecated
   public BinaryDecoder createBinaryDecoder(InputStream in, BinaryDecoder reuse) {
     return binaryDecoder(in, reuse);
   }
-  
+
   /**
    * Creates or reinitializes a {@link BinaryDecoder} with the input stream
    * provided as the source of data. If <i>reuse</i> is provided, it will be
@@ -113,7 +113,7 @@
    * {@link BinaryDecoder#inputStream()} provides a view on the data that is
    * buffer-aware, for users that need to interleave access to data
    * with the Decoder API.
-   * 
+   *
    * @param in
    *          The InputStream to initialize to
    * @param reuse
@@ -135,7 +135,7 @@
       return ((BinaryDecoder)reuse).configure(in, binaryDecoderBufferSize);
     }
   }
-  
+
   /**
    * Creates or reinitializes a {@link BinaryDecoder} with the input stream
    * provided as the source of data. If <i>reuse</i> is provided, it will be
@@ -149,7 +149,7 @@
    * A "direct" BinaryDecoder does not read ahead from an InputStream or other data source
    * that cannot be rewound.  From the perspective of a client, a "direct" decoder
    * must never read beyond the minimum necessary bytes to service a {@link BinaryDecoder}
-   * API read request.  
+   * API read request.
    * <p/>
    * In the case that the improved performance of a buffering implementation does not outweigh the
    * inconvenience of its buffering semantics, a "direct" decoder can be
@@ -187,7 +187,7 @@
       return reuse.configure(bytes, offset, length);
     }
   }
-  
+
   /**
    * Creates or reinitializes a {@link BinaryDecoder} with the byte array
    * provided as the source of data. If <i>reuse</i> is provided, it will
@@ -197,7 +197,7 @@
    * {@link BinaryDecoder#inputStream()} provides a view on the data that is
    * buffer-aware and can provide a view of the data not yet read by Decoder API
    * methods.
-   * 
+   *
    * @param bytes The byte array to initialize to
    * @param offset The offset to start reading from
    * @param length The maximum number of bytes to read from the byte array
@@ -223,7 +223,7 @@
   public BinaryDecoder createBinaryDecoder(byte[] bytes, BinaryDecoder reuse) {
     return binaryDecoder(bytes, 0, bytes.length, reuse);
   }
-  
+
   /**
    * This method is shorthand for
    * <pre>
@@ -238,7 +238,7 @@
    * Creates a {@link JsonDecoder} using the InputStrim provided for reading
    * data that conforms to the Schema provided.
    * <p/>
-   * 
+   *
    * @param schema
    *          The Schema for data read from this JsonEncoder. Cannot be null.
    * @param input
@@ -250,12 +250,12 @@
       throws IOException {
     return new JsonDecoder(schema, input);
   }
-  
+
   /**
    * Creates a {@link JsonDecoder} using the String provided for reading data
    * that conforms to the Schema provided.
    * <p/>
-   * 
+   *
    * @param schema
    *          The Schema for data read from this JsonEncoder. Cannot be null.
    * @param input
@@ -272,7 +272,7 @@
    * Creates a {@link ValidatingDecoder} wrapping the Decoder provided. This
    * ValidatingDecoder will ensure that operations against it conform to the
    * schema provided.
-   * 
+   *
    * @param schema
    *          The Schema to validate against. Cannot be null.
    * @param wrapped
@@ -290,7 +290,7 @@
    * Creates a {@link ResolvingDecoder} wrapping the Decoder provided. This
    * ResolvingDecoder will resolve input conforming to the <i>writer</i> schema
    * from the wrapped Decoder, and present it as the <i>reader</i> schema.
-   * 
+   *
    * @param writer
    *          The Schema that the source data is in. Cannot be null.
    * @param reader
@@ -306,7 +306,7 @@
       Decoder wrapped) throws IOException {
     return new ResolvingDecoder(writer, reader, wrapped);
   }
-  
+
   private static class DefaultDecoderFactory extends DecoderFactory {
     @Override
     public DecoderFactory configureDecoderBufferSize(int bufferSize) {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryDecoder.java
index 5bc1760..07c6199 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryDecoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryDecoder.java
@@ -54,11 +54,11 @@
 
   private class ReuseByteReader extends ByteReader {
     private final ByteBufferInputStream bbi;
-    
+
     public ReuseByteReader(ByteBufferInputStream bbi) {
       this.bbi = bbi;
     }
-    
+
     @Override
     public ByteBuffer read(ByteBuffer old, int length) throws IOException {
       if (old != null) {
@@ -67,7 +67,7 @@
         return bbi.readBuffer(length);
       }
     }
-    
+
   }
 
   private ByteReader byteReader;
@@ -111,7 +111,7 @@
       shift += 7;
     } while (shift < 32);
     throw new IOException("Invalid int encoding");
-    
+
   }
 
   @Override
@@ -119,7 +119,7 @@
     long n = 0;
     int b;
     int shift = 0;
-    do { 
+    do {
       b = in.read();
       if (b >= 0) {
          n |= (b & 0x7FL) << shift;
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryEncoder.java
index 1126742..ff5b52c 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryEncoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryEncoder.java
@@ -43,7 +43,7 @@
   private final byte[] buf = new byte[12];
 
   /** Create a writer that sends its output to the underlying stream
-   *  <code>out</code>. 
+   *  <code>out</code>.
    **/
   DirectBinaryEncoder(OutputStream out) {
     configure(out);
@@ -84,7 +84,7 @@
   }
 
   /* buffering is slower for writeLong when the number is small enough to
-   * fit in an int. 
+   * fit in an int.
    * (Sun JRE 1.6u22, x64 -server) */
   @Override
   public void writeLong(long n) throws IOException {
@@ -101,7 +101,7 @@
     int len = BinaryData.encodeLong(n, buf, 0);
     out.write(buf, 0, len);
   }
-  
+
   @Override
   public void writeFloat(float f) throws IOException {
     int len = BinaryData.encodeFloat(f, buf, 0);
@@ -124,7 +124,7 @@
   protected void writeZero() throws IOException {
     out.write(0);
   }
-  
+
   @Override
   public int bytesBuffered() {
     return 0;
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/Encoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/Encoder.java
index c3647c0..4148afb 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/Encoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/Encoder.java
@@ -40,7 +40,7 @@
  * (See {@link #writeArrayStart} for details on these methods.)
  * <p/>
  * {@link EncoderFactory} contains Encoder construction and configuration
- * facilities.  
+ * facilities.
  *  @see EncoderFactory
  *  @see Decoder
  */
@@ -53,7 +53,7 @@
    *         null is not expected
    */
   public abstract void writeNull() throws IOException;
-  
+
   /**
    * Write a boolean value.
    * @throws AvroTypeException If this is a stateful writer and a
@@ -74,9 +74,9 @@
    * long is not expected
    */
   public abstract void writeLong(long n) throws IOException;
-  
+
   /** Write a float.
-   * @throws IOException 
+   * @throws IOException
    * @throws AvroTypeException If this is a stateful writer and a
    * float is not expected
    */
@@ -98,7 +98,7 @@
 
   /**
    * Write a Unicode character string.  The default implementation converts
-   * the String to a {@link org.apache.avro.util.Utf8}.  Some Encoder 
+   * the String to a {@link org.apache.avro.util.Utf8}.  Some Encoder
    * implementations may want to do something different as a performance optimization.
    * @throws AvroTypeException If this is a stateful writer and a
    * char-string is not expected
@@ -120,25 +120,25 @@
     else
       writeString(charSequence.toString());
   }
-  
+
   /**
    * Write a byte string.
    * @throws AvroTypeException If this is a stateful writer and a
    *         byte-string is not expected
    */
   public abstract void writeBytes(ByteBuffer bytes) throws IOException;
-  
+
   /**
    * Write a byte string.
    * @throws AvroTypeException If this is a stateful writer and a
    * byte-string is not expected
    */
   public abstract void writeBytes(byte[] bytes, int start, int len) throws IOException;
-  
+
   /**
    * Writes a byte string.
    * Equivalent to <tt>writeBytes(bytes, 0, bytes.length)</tt>
-   * @throws IOException 
+   * @throws IOException
    * @throws AvroTypeException If this is a stateful writer and a
    * byte-string is not expected
    */
@@ -165,7 +165,7 @@
   public void writeFixed(byte[] bytes) throws IOException {
     writeFixed(bytes, 0, bytes.length);
   }
-  
+
   /** Writes a fixed from a ByteBuffer. */
   public void writeFixed(ByteBuffer bytes) throws IOException {
     int pos = bytes.position();
@@ -225,12 +225,12 @@
    * to {@link #startItem()} must be equal to the count specified
    * in {@link #setItemCount}. Once a batch is completed you
    * can start another batch with {@link #setItemCount}.
-   * 
+   *
    * @param itemCount The number of {@link #startItem()} calls to follow.
    * @throws IOException
    */
   public abstract void setItemCount(long itemCount) throws IOException;
-  
+
   /**
    * Start a new item of an array or map.
    * See {@link #writeArrayStart} for usage information.
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/EncoderFactory.java b/lang/java/avro/src/main/java/org/apache/avro/io/EncoderFactory.java
index 679daa7..9ba1acc 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/EncoderFactory.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/EncoderFactory.java
@@ -30,7 +30,7 @@
  * Factory methods that create Encoder instances are thread-safe.
  * Multiple instances with different configurations can be cached
  * by an application.
- * 
+ *
  * @see Encoder
  * @see BinaryEncoder
  * @see JsonEncoder
@@ -46,9 +46,9 @@
   private static final int MIN_BLOCK_BUFFER_SIZE = 64;
   private static final int MAX_BLOCK_BUFFER_SIZE = 1024 * 1024 * 1024;
 
-  private static final EncoderFactory DEFAULT_FACTORY = 
+  private static final EncoderFactory DEFAULT_FACTORY =
     new DefaultEncoderFactory();
-  
+
   protected int binaryBufferSize = DEFAULT_BUFFER_SIZE;
   protected int binaryBlockSize = DEFAULT_BLOCK_BUFFER_SIZE;
 
@@ -59,12 +59,12 @@
   public static EncoderFactory get() {
     return DEFAULT_FACTORY;
   }
-  
+
   /**
    * Configures this factory to use the specified buffer size when creating
    * Encoder instances that buffer their output. The default buffer size is 2048
    * bytes.
-   * 
+   *
    * @param size
    *          The buffer size to configure new instances with. Valid values are
    *          in the range [32, 16*1024*1024]. Values outside this range are set
@@ -86,7 +86,7 @@
     this.binaryBufferSize = size;
     return this;
   }
-  
+
   /**
    * Returns this factory's configured default buffer size.  Used when creating
    * Encoder instances that buffer writes.
@@ -101,7 +101,7 @@
   /**
    * Configures this factory to construct blocking BinaryEncoders with the
    * specified block buffer size. The default buffer size is 64 * 1024 bytes.
-   * 
+   *
    * @param size
    *          The preferred block size for array blocking. Arrays larger than
    *          this size will be segmented into blocks according to the Avro
@@ -115,7 +115,7 @@
    * @see #blockingBinaryEncoder(OutputStream, BinaryEncoder)
    */
   public EncoderFactory configureBlockSize(int size) {
-    if (size < MIN_BLOCK_BUFFER_SIZE) 
+    if (size < MIN_BLOCK_BUFFER_SIZE)
       size = MIN_BLOCK_BUFFER_SIZE;
     if (size > MAX_BLOCK_BUFFER_SIZE)
       size = MAX_BLOCK_BUFFER_SIZE;
@@ -124,7 +124,7 @@
   }
 
   /**
-   * Returns this factory's configured default block buffer size.  
+   * Returns this factory's configured default block buffer size.
    * {@link BinaryEncoder} instances created with
    * #blockingBinaryEncoder(OutputStream, BinaryEncoder)
    * will have block buffers of this size.
@@ -136,7 +136,7 @@
   public int getBlockSize() {
     return this.binaryBlockSize;
   }
-  
+
   /**
    * Creates or reinitializes a {@link BinaryEncoder} with the OutputStream
    * provided as the destination for written data. If <i>reuse</i> is provided,
@@ -147,11 +147,11 @@
    * Data may not appear on the underlying OutputStream until
    * {@link Encoder#flush()} is called.  The buffer size is configured with
    * {@link #configureBufferSize(int)}.
-   * </p>  If buffering is not desired, and lower performance is acceptable, use 
+   * </p>  If buffering is not desired, and lower performance is acceptable, use
    * {@link #directBinaryEncoder(OutputStream, BinaryEncoder)}
    * <p/>
    * {@link BinaryEncoder} instances returned by this method are not thread-safe
-   * 
+   *
    * @param out
    *          The OutputStream to write to.  Cannot be null.
    * @param reuse
@@ -163,7 +163,7 @@
    *         <i>reuse</i> is null, this will be a new instance. If <i>reuse</i>
    *         is not null, then the returned instance may be a new instance or
    *         <i>reuse</i> reconfigured to use <i>out</i>.
-   * @throws IOException 
+   * @throws IOException
    * @see BufferedBinaryEncoder
    * @see Encoder
    */
@@ -193,7 +193,7 @@
    * would not be useful.
    * <p/>
    * {@link BinaryEncoder} instances returned by this method are not thread-safe.
-   * 
+   *
    * @param out
    *          The OutputStream to initialize to. Cannot be null.
    * @param reuse
@@ -215,7 +215,7 @@
       return ((DirectBinaryEncoder)reuse).configure(out);
     }
   }
-  
+
   /**
    * Creates or reinitializes a {@link BinaryEncoder} with the OutputStream
    * provided as the destination for written data. If <i>reuse</i> is provided,
@@ -230,10 +230,10 @@
    * delimited with byte sizes for Arrays and Maps.  This allows for some decoders
    * to skip over large Arrays or Maps without decoding the contents, but adds
    * some overhead.  The default block size is configured with
-   * {@link #configureBlockSize(int)} 
+   * {@link #configureBlockSize(int)}
    * <p/>
    * {@link BinaryEncoder} instances returned by this method are not thread-safe.
-   * 
+   *
    * @param out
    *          The OutputStream to initialize to. Cannot be null.
    * @param reuse
@@ -269,7 +269,7 @@
    * underlying OutputStream until {@link Encoder#flush()} is called.
    * <p/>
    * {@link JsonEncoder} is not thread-safe.
-   * 
+   *
    * @param schema
    *          The Schema for data written to this JsonEncoder. Cannot be null.
    * @param out
@@ -290,7 +290,7 @@
    * underlying OutputStream until {@link Encoder#flush()} is called.
    * <p/>
    * {@link JsonEncoder} is not thread-safe.
-   * 
+   *
    * @param schema
    *          The Schema for data written to this JsonEncoder. Cannot be null.
    * @param out
@@ -313,7 +313,7 @@
    * underlying output until {@link Encoder#flush()} is called.
    * <p/>
    * {@link JsonEncoder} is not thread-safe.
-   * 
+   *
    * @param schema
    *          The Schema for data written to this JsonEncoder. Cannot be null.
    * @param gen
@@ -327,7 +327,7 @@
       throws IOException {
     return new JsonEncoder(schema, gen);
   }
-  
+
   /**
    * Creates a {@link ValidatingEncoder} that wraps the Encoder provided.
    * This ValidatingEncoder will ensure that operations against it conform
@@ -337,7 +337,7 @@
    * underlying output until {@link Encoder#flush()} is called.
    * <p/>
    * {@link ValidatingEncoder} is not thread-safe.
-   * 
+   *
    * @param schema
    *          The Schema to validate operations against. Cannot be null.
    * @param encoder
@@ -350,7 +350,7 @@
       throws IOException {
     return new ValidatingEncoder(schema, encoder);
   }
-  
+
   // default encoder is not mutable
   private static class DefaultEncoderFactory extends EncoderFactory {
     @Override
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java
index 8206181..8045199 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java
@@ -43,7 +43,7 @@
 import org.codehaus.jackson.JsonToken;
 import org.codehaus.jackson.ObjectCodec;
 
-/** A {@link Decoder} for Avro's JSON data encoding. 
+/** A {@link Decoder} for Avro's JSON data encoding.
  * </p>
  * Construct using {@link DecoderFactory}.
  * </p>
@@ -54,20 +54,20 @@
   private JsonParser in;
   private static JsonFactory jsonFactory = new JsonFactory();
   Stack<ReorderBuffer> reorderBuffers = new Stack<ReorderBuffer>();
-  ReorderBuffer currentReorderBuffer; 
-  
+  ReorderBuffer currentReorderBuffer;
+
   private static class ReorderBuffer {
     public Map<String, List<JsonElement>> savedFields = new HashMap<String, List<JsonElement>>();
-    public JsonParser origParser = null; 
+    public JsonParser origParser = null;
   }
-  
+
   static final String CHARSET = "ISO-8859-1";
 
   private JsonDecoder(Symbol root, InputStream in) throws IOException {
     super(root);
     configure(in);
   }
-  
+
   private JsonDecoder(Symbol root, String in) throws IOException {
     super(root);
     configure(in);
@@ -76,11 +76,11 @@
   JsonDecoder(Schema schema, InputStream in) throws IOException {
     this(getSymbol(schema), in);
   }
-  
+
   JsonDecoder(Schema schema, String in) throws IOException {
     this(getSymbol(schema), in);
   }
-  
+
   private static Symbol getSymbol(Schema schema) {
     if (null == schema) {
       throw new NullPointerException("Schema cannot be null!");
@@ -109,7 +109,7 @@
     this.in.nextToken();
     return this;
   }
-  
+
   /**
    * Reconfigures this JsonDecoder to use the String provided for input.
    * <p/>
@@ -152,7 +152,7 @@
   @Override
   public boolean readBoolean() throws IOException {
     advance(Symbol.BOOLEAN);
-    JsonToken t = in.getCurrentToken(); 
+    JsonToken t = in.getCurrentToken();
     if (t == JsonToken.VALUE_TRUE || t == JsonToken.VALUE_FALSE) {
       in.nextToken();
       return t == JsonToken.VALUE_TRUE;
@@ -172,7 +172,7 @@
       throw error("int");
     }
   }
-    
+
   @Override
   public long readLong() throws IOException {
     advance(Symbol.LONG);
@@ -208,7 +208,7 @@
       throw error("double");
     }
   }
-    
+
   @Override
   public Utf8 readString(Utf8 old) throws IOException {
     return new Utf8(readString());
@@ -284,7 +284,7 @@
         top.size + " but received " + size + " bytes.");
     }
   }
-    
+
   @Override
   public void readFixed(byte[] bytes, int start, int len) throws IOException {
     checkFixed(len);
@@ -377,7 +377,7 @@
     if (in.getCurrentToken() == JsonToken.START_ARRAY) {
       in.skipChildren();
       in.nextToken();
-      advance(Symbol.ARRAY_END);    
+      advance(Symbol.ARRAY_END);
     } else {
       throw error("array-start");
     }
@@ -417,7 +417,7 @@
     if (in.getCurrentToken() == JsonToken.START_OBJECT) {
       in.skipChildren();
       in.nextToken();
-      advance(Symbol.MAP_END);    
+      advance(Symbol.MAP_END);
     } else {
       throw error("map-start");
     }
@@ -428,7 +428,7 @@
   public int readIndex() throws IOException {
     advance(Symbol.UNION);
     Symbol.Alternative a = (Symbol.Alternative) parser.popSymbol();
-    
+
     String label;
     if (in.getCurrentToken() == JsonToken.VALUE_NULL) {
       label = "null";
@@ -514,12 +514,12 @@
       this.token = t;
       this.value = value;
     }
-    
+
     public JsonElement(JsonToken t) {
       this(t, null);
     }
   }
-  
+
   private static List<JsonElement> getVaueAsTree(JsonParser in) throws IOException {
     int level = 0;
     List<JsonElement> result = new ArrayList<JsonElement>();
@@ -685,7 +685,7 @@
         throws IOException {
         throw new UnsupportedOperationException();
       }
-      
+
       @Override
       public JsonToken getCurrentToken() {
         return elements.get(pos).token;
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java
index 9d413e6..2c2b0c1 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java
@@ -34,7 +34,7 @@
 import org.codehaus.jackson.util.DefaultPrettyPrinter;
 import org.codehaus.jackson.util.MinimalPrettyPrinter;
 
-/** An {@link Encoder} for Avro's JSON data encoding. 
+/** An {@link Encoder} for Avro's JSON data encoding.
  * </p>
  * Construct using {@link EncoderFactory}.
  * </p>
@@ -79,7 +79,7 @@
   private static JsonGenerator getJsonGenerator(OutputStream out, boolean pretty)
       throws IOException {
     if (null == out)
-      throw new NullPointerException("OutputStream cannot be null"); 
+      throw new NullPointerException("OutputStream cannot be null");
     JsonGenerator g
       = new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
     if (pretty) {
@@ -99,7 +99,7 @@
     }
     return g;
   }
-  
+
   /**
    * Reconfigures this JsonEncoder to use the output stream provided.
    * <p/>
@@ -108,7 +108,7 @@
    * Otherwise, this JsonEncoder will flush its current output and then
    * reconfigure its output to use a default UTF8 JsonGenerator that writes
    * to the provided OutputStream.
-   * 
+   *
    * @param out
    *          The OutputStream to direct output to. Cannot be null.
    * @throws IOException
@@ -118,7 +118,7 @@
     this.configure(getJsonGenerator(out, false));
     return this;
   }
-  
+
   /**
    * Reconfigures this JsonEncoder to output to the JsonGenerator provided.
    * <p/>
@@ -126,7 +126,7 @@
    * <p/>
    * Otherwise, this JsonEncoder will flush its current output and then
    * reconfigure its output to use the provided JsonGenerator.
-   * 
+   *
    * @param generator
    *          The JsonGenerator to direct output to. Cannot be null.
    * @throws IOException
@@ -184,8 +184,8 @@
   public void writeString(Utf8 utf8) throws IOException {
     writeString(utf8.toString());
   }
-  
-  @Override 
+
+  @Override
   public void writeString(String str) throws IOException {
     parser.advance(Symbol.STRING);
     if (parser.topSymbol() == Symbol.MAP_KEY_MARKER) {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/ParsingDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/ParsingDecoder.java
index db1790e..39e8aa3 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/ParsingDecoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/ParsingDecoder.java
@@ -35,7 +35,7 @@
   }
 
   protected abstract void skipFixed() throws IOException;
-  
+
   @Override
   public void skipAction() throws IOException {
     parser.popSymbol();
@@ -72,6 +72,6 @@
       skipMap();
     }
   }
-  
+
 }
 
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/ParsingEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/ParsingEncoder.java
index b6fe369..53a7833 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/ParsingEncoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/ParsingEncoder.java
@@ -30,7 +30,7 @@
    * the collections (array or map).
    */
   private long[] counts = new long[10];
-  
+
   protected int pos = -1;
 
   @Override
@@ -54,7 +54,7 @@
     }
     counts[pos] = 0;
   }
-  
+
   protected final void pop() {
     if (counts[pos] != 0) {
       throw new AvroTypeException("Incorrect number of items written. " +
@@ -62,7 +62,7 @@
     }
     pos--;
   }
-  
+
   protected final int depth() {
     return pos;
   }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java
index 2d7eba2..02cad1f 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java
@@ -43,12 +43,12 @@
 public class ResolvingDecoder extends ValidatingDecoder {
 
   private Decoder backup;
-  
+
   ResolvingDecoder(Schema writer, Schema reader, Decoder in)
     throws IOException {
     this(resolve(writer, reader), in);
   }
-  
+
   /**
    * Constructs a <tt>ResolvingDecoder</tt> using the given resolver.
    * The resolver must have been returned by a previous call to
@@ -68,7 +68,7 @@
    * returned Object is immutable and hence can be simultaneously used
    * in many ResolvingDecoders. This method is reasonably expensive, the
    * users are encouraged to cache the result.
-   * 
+   *
    * @param writer  The writer's schema. Cannot be null.
    * @param reader  The reader's schema. Cannot be null.
    * @return  The opaque reolver.
@@ -124,20 +124,20 @@
    * field values.)
    *
    * @throws AvroTypeException If we're not starting a new record
-   *                               
+   *
    */
   public final Schema.Field[] readFieldOrder() throws IOException {
     return ((Symbol.FieldOrderAction) parser.advance(Symbol.FIELD_ACTION)).
       fields;
   }
-  
+
   /**
    * Consume any more data that has been written by the writer but not
    * needed by the reader so that the the underlying decoder is in proper
    * shape for the next record. This situation happens when, for example,
    * the writer writes a record with two fields and the reader needs only the
    * first field.
-   * 
+   *
    * This function should be called after completely decoding an object but
    * before next object can be decoded from the same underlying decoder
    * either directly or through another resolving decoder. If the same resolving
@@ -175,7 +175,7 @@
       return (float) in.readFloat();
     }
   }
-  
+
   @Override
   public double readDouble() throws IOException {
     Symbol actual = parser.advance(Symbol.DOUBLE);
@@ -190,7 +190,7 @@
       return in.readDouble();
     }
   }
-  
+
   @Override
   public Utf8 readString(Utf8 old) throws IOException {
     Symbol actual = parser.advance(Symbol.STRING);
@@ -261,7 +261,7 @@
       throw new AvroTypeException((String) o);
     }
   }
-    
+
   @Override
   public int readIndex() throws IOException {
     parser.advance(Symbol.UNION);
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingDecoder.java
index ec4275c..3963dcf 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingDecoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingDecoder.java
@@ -50,7 +50,7 @@
   ValidatingDecoder(Schema schema, Decoder in) throws IOException {
     this(getSymbol(schema), in);
   }
-  
+
   private static Symbol getSymbol(Schema schema) {
     if (null == schema) {
       throw new NullPointerException("Schema cannot be null");
@@ -70,7 +70,7 @@
     parser.advance(Symbol.NULL);
     in.readNull();
   }
-    
+
   @Override
   public boolean readBoolean() throws IOException {
     parser.advance(Symbol.BOOLEAN);
@@ -82,7 +82,7 @@
     parser.advance(Symbol.INT);
     return in.readInt();
   }
-    
+
   @Override
   public long readLong() throws IOException {
     parser.advance(Symbol.LONG);
@@ -100,7 +100,7 @@
     parser.advance(Symbol.DOUBLE);
     return in.readDouble();
   }
-    
+
   @Override
   public Utf8 readString(Utf8 old) throws IOException {
     parser.advance(Symbol.STRING);
@@ -140,7 +140,7 @@
         top.size + " but received " + size + " bytes.");
     }
   }
-    
+
   @Override
   public void readFixed(byte[] bytes, int start, int len) throws IOException {
     checkFixed(len);
@@ -201,7 +201,7 @@
         parser.skipRepeater();
       }
     }
-    parser.advance(Symbol.ARRAY_END);    
+    parser.advance(Symbol.ARRAY_END);
     return 0;
   }
 
@@ -245,7 +245,7 @@
     parser.pushSymbol(top.getSymbol(result));
     return result;
   }
-  
+
   @Override
   public Symbol doAction(Symbol input, Symbol top) throws IOException {
     return null;
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingEncoder.java
index 275df18..7d2777f 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingEncoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingEncoder.java
@@ -39,7 +39,7 @@
  * @see Encoder
  * @see EncoderFactory
  */
-public class ValidatingEncoder extends ParsingEncoder 
+public class ValidatingEncoder extends ParsingEncoder
   implements Parser.ActionHandler {
   protected Encoder out;
   protected final Parser parser;
@@ -70,7 +70,7 @@
     this.out = encoder;
     return this;
   }
-  
+
   @Override
   public void writeNull() throws IOException {
     parser.advance(Symbol.NULL);
@@ -112,13 +112,13 @@
     parser.advance(Symbol.STRING);
     out.writeString(utf8);
   }
-  
+
   @Override
   public void writeString(String str) throws IOException {
     parser.advance(Symbol.STRING);
     out.writeString(str);
   }
-  
+
   @Override
   public void writeString(CharSequence charSequence) throws IOException {
     parser.advance(Symbol.STRING);
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Parser.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Parser.java
index 796de3e..867e514 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Parser.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Parser.java
@@ -101,7 +101,7 @@
       }
     }
   }
-  
+
   /**
    * Performs any implicit actions at the top the stack, expanding any
    * production (other than the root) that may be encountered.
@@ -124,12 +124,12 @@
   }
 
   /**
-   * Performs any "trailing" implicit actions at the top the stack. 
+   * Performs any "trailing" implicit actions at the top the stack.
    */
   public final void processTrailingImplicitActions() throws IOException {
     while (pos >= 1) {
       Symbol top = stack[pos - 1];
-      if (top.kind == Symbol.Kind.IMPLICIT_ACTION 
+      if (top.kind == Symbol.Kind.IMPLICIT_ACTION
         && ((Symbol.ImplicitAction) top).isTrailing) {
         pos--;
         symbolHandler.doAction(null, top);
@@ -160,14 +160,14 @@
   public Symbol popSymbol() {
     return stack[--pos];
   }
-  
+
   /**
    * Returns the top symbol from the stack.
    */
   public Symbol topSymbol() {
     return stack[pos - 1];
   }
-  
+
   /**
    * Pushes <tt>sym</tt> on to the stack.
    */
@@ -177,7 +177,7 @@
     }
     stack[pos++] = sym;
   }
-  
+
   /**
    * Returns the depth of the stack.
    */
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
index 31f38de..cc028cf 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
@@ -39,29 +39,29 @@
 public class ResolvingGrammarGenerator extends ValidatingGrammarGenerator {
   /**
    * Resolves the writer schema <tt>writer</tt> and the reader schema
-   * <tt>reader</tt> and returns the start symbol for the grammar generated. 
+   * <tt>reader</tt> and returns the start symbol for the grammar generated.
    * @param writer    The schema used by the writer
    * @param reader    The schema used by the reader
    * @return          The start symbol for the resolving grammar
-   * @throws IOException 
+   * @throws IOException
    */
   public final Symbol generate(Schema writer, Schema reader)
     throws IOException {
     return Symbol.root(generate(writer, reader, new HashMap<LitS, Symbol>()));
   }
-  
+
   /**
    * Resolves the writer schema <tt>writer</tt> and the reader schema
    * <tt>reader</tt> and returns the start symbol for the grammar generated.
    * If there is already a symbol in the map <tt>seen</tt> for resolving the
    * two schemas, then that symbol is returned. Otherwise a new symbol is
-   * generated and returnd. 
+   * generated and returnd.
    * @param writer    The schema used by the writer
    * @param reader    The schema used by the reader
    * @param seen      The &lt;reader-schema, writer-schema&gt; to symbol
    * map of start symbols of resolving grammars so far.
    * @return          The start symbol for the resolving grammar
-   * @throws IOException 
+   * @throws IOException
    */
   public Symbol generate(Schema writer, Schema reader,
                                 Map<LitS, Symbol> seen) throws IOException
@@ -108,7 +108,7 @@
                 generate(writer.getElementType(),
                 reader.getElementType(), seen)),
             Symbol.ARRAY_START);
-      
+
       case MAP:
         return Symbol.seq(Symbol.repeat(Symbol.MAP_END,
                 generate(writer.getValueType(),
@@ -125,7 +125,7 @@
       if (writerType == Schema.Type.UNION) {
         return resolveUnion(writer, reader, seen);
       }
-  
+
       switch (readerType) {
       case LONG:
         switch (writerType) {
@@ -133,7 +133,7 @@
           return Symbol.resolve(super.generate(writer, seen), Symbol.LONG);
         }
         break;
-  
+
       case FLOAT:
         switch (writerType) {
         case INT:
@@ -141,7 +141,7 @@
           return Symbol.resolve(super.generate(writer, seen), Symbol.FLOAT);
         }
         break;
-  
+
       case DOUBLE:
         switch (writerType) {
         case INT:
@@ -150,21 +150,21 @@
           return Symbol.resolve(super.generate(writer, seen), Symbol.DOUBLE);
         }
         break;
-  
+
       case BYTES:
         switch (writerType) {
         case STRING:
           return Symbol.resolve(super.generate(writer, seen), Symbol.BYTES);
         }
         break;
-  
+
       case STRING:
         switch (writerType) {
         case BYTES:
           return Symbol.resolve(super.generate(writer, seen), Symbol.STRING);
         }
         break;
-  
+
       case UNION:
         int j = bestBranch(reader, writer, seen);
         if (j >= 0) {
@@ -308,7 +308,7 @@
     e.flush();
     return out.toByteArray();
   }
-  
+
   /**
    * Encodes the given Json node <tt>n</tt> on to the encoder <tt>e</tt>
    * according to the schema <tt>s</tt>.
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/SkipParser.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/SkipParser.java
index 5afbe60..baf4e94 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/SkipParser.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/SkipParser.java
@@ -39,7 +39,7 @@
      */
     void skipTopSymbol() throws IOException;
   }
-  
+
   private final SkipHandler skipHandler;
 
   public SkipParser(Symbol root, ActionHandler symbolHandler,
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java
index 80ae644..178aa0b 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java
@@ -60,7 +60,7 @@
    * the symbols that forms the production for this symbol. The
    * sequence is in the reverse order of production. This is useful
    * for easy copying onto parsing stack.
-   * 
+   *
    * Please note that this is a final. So the production for a symbol
    * should be known before that symbol is constructed. This requirement
    * cannot be met for those symbols which are recursive (e.g. a record that
@@ -77,8 +77,8 @@
   protected Symbol(Kind kind) {
     this(kind, null);
   }
-    
-    
+
+
   protected Symbol(Kind kind, Symbol[] production) {
     this.production = production;
     this.kind = kind;
@@ -120,7 +120,7 @@
   static Symbol error(String e) {
     return new ErrorAction(e);
   }
-  
+
   /**
    * A convenience method to construct a ResolvingAction.
    * @param w The writer symbol
@@ -129,32 +129,32 @@
   static Symbol resolve(Symbol w, Symbol r) {
     return new ResolvingAction(w, r);
   }
-  
+
   private static class Fixup {
     public final Symbol[] symbols;
     public final int pos;
-    
+
     public Fixup(Symbol[] symbols, int pos) {
       this.symbols = symbols;
       this.pos = pos;
     }
   }
-  
+
   public Symbol flatten(Map<Sequence, Sequence> map,
       Map<Sequence, List<Fixup>> map2) {
     return this;
   }
-  
+
   public int flattenedSize() {
     return 1;
   }
-  
+
   /**
    * Flattens the given sub-array of symbols into an sub-array of symbols. Every
    * <tt>Sequence</tt> in the input are replaced by its production recursively.
    * Non-<tt>Sequence</tt> symbols, they internally have other symbols
    * those internal symbols also get flattened.
-   * 
+   *
    * The algorithm does a few tricks to handle recursive symbol definitions.
    * In order to avoid infinite recursion with recursive symbols, we have a map
    * of Symbol->Symbol. Before fully constructing a flattened symbol for a
@@ -168,7 +168,7 @@
    * has not not be fully constructed yet, we copy a bunch of <tt>null</tt>s.
    * Fix-up remembers all those <tt>null</tt> patches. The fix-ups gets finally
    * filled when we know the symbols to occupy those patches.
-   *  
+   *
    * @param in  The array of input symbols to flatten
    * @param start The position where the input sub-array starts.
    * @param out The output that receives the flattened list of symbols. The
@@ -246,7 +246,7 @@
 
   public static class ImplicitAction extends Symbol {
     /**
-     * Set to <tt>true</tt> if and only if this implicit action is 
+     * Set to <tt>true</tt> if and only if this implicit action is
      * a trailing action. That is, it is an action that follows
      * real symbol. E.g {@link Symbol#DEFAULT_END_ACTION}.
      */
@@ -255,13 +255,13 @@
     private ImplicitAction() {
       this(false);
     }
-    
+
     private ImplicitAction(boolean isTrailing) {
       super(Kind.IMPLICIT_ACTION);
       this.isTrailing = isTrailing;
     }
   }
-  
+
   protected static class Root extends Symbol {
     private Root(Symbol... symbols) {
       super(Kind.ROOT, makeProduction(symbols));
@@ -276,7 +276,7 @@
       return result;
     }
   }
-  
+
   protected static class Sequence extends Symbol implements Iterable<Symbol> {
     private Sequence(Symbol[] productions) {
       super(Kind.SEQUENCE, productions);
@@ -285,19 +285,19 @@
     public Symbol get(int index) {
       return production[index];
     }
-    
+
     public int size() {
       return production.length;
     }
-    
+
     public Iterator<Symbol> iterator() {
       return new Iterator<Symbol>() {
         private int pos = production.length;
-        
+
         public boolean hasNext() {
           return 0 < pos;
         }
-        
+
         public Symbol next() {
           if (0 < pos) {
             return production[--pos];
@@ -305,7 +305,7 @@
             throw new NoSuchElementException();
           }
         }
-        
+
         public void remove() {
           throw new UnsupportedOperationException();
         }
@@ -320,7 +320,7 @@
         map.put(this, result);
         List<Fixup> l = new ArrayList<Fixup>();
         map2.put(result, l);
-        
+
         flatten(production, 0,
             result.production, 0, map, map2);
         for (Fixup f : l) {
@@ -340,19 +340,19 @@
 
   public static class Repeater extends Symbol {
     public final Symbol end;
-   
+
     private Repeater(Symbol end, Symbol... sequenceToRepeat) {
       super(Kind.REPEATER, makeProduction(sequenceToRepeat));
       this.end = end;
       production[0] = this;
     }
-    
+
     private static Symbol[] makeProduction(Symbol[] p) {
       Symbol[] result = new Symbol[p.length + 1];
       System.arraycopy(p, 0, result, 1, p.length);
       return result;
     }
-    
+
     @Override
     public Repeater flatten(Map<Sequence, Sequence> map,
         Map<Sequence, List<Fixup>> map2) {
@@ -363,9 +363,9 @@
     }
 
   }
-  
+
   /**
-   * Returns true if the Parser contains any Error symbol, indicating that it may fail 
+   * Returns true if the Parser contains any Error symbol, indicating that it may fail
    * for some inputs.
    */
   public static boolean hasErrors(Symbol symbol) {
@@ -388,7 +388,7 @@
       throw new RuntimeException("unknown symbol kind: " + symbol.kind);
     }
   }
-  
+
   private static boolean hasErrors(Symbol root, Symbol[] symbols) {
     if(null != symbols) {
       for(Symbol s: symbols) {
@@ -402,7 +402,7 @@
     }
     return false;
   }
-    
+
   public static class Alternative extends Symbol {
     public final Symbol[] symbols;
     public final String[] labels;
@@ -411,15 +411,15 @@
       this.symbols = symbols;
       this.labels = labels;
     }
-    
+
     public Symbol getSymbol(int index) {
       return symbols[index];
     }
-    
+
     public String getLabel(int index) {
       return labels[index];
     }
-    
+
     public int size() {
       return symbols.length;
     }
@@ -468,7 +468,7 @@
   public static EnumAdjustAction enumAdjustAction(int rsymCount, Object[] adj) {
     return new EnumAdjustAction(rsymCount, adj);
   }
-  
+
   public static class EnumAdjustAction extends IntCheckAction {
     public final Object[] adjustments;
     @Deprecated public EnumAdjustAction(int rsymCount, Object[] adjustments) {
@@ -492,7 +492,7 @@
       this.writer = writer;
       this.reader = reader;
     }
-    
+
     @Override
     public ResolvingAction flatten(Map<Sequence, Sequence> map,
         Map<Sequence, List<Fixup>> map2) {
@@ -501,7 +501,7 @@
     }
 
   }
-  
+
   public static SkipAction skipAction(Symbol symToSkip) {
     return new SkipAction(symToSkip);
   }
@@ -512,7 +512,7 @@
       super(true);
       this.symToSkip = symToSkip;
     }
-    
+
     @Override
     public SkipAction flatten(Map<Sequence, Sequence> map,
         Map<Sequence, List<Fixup>> map2) {
@@ -524,7 +524,7 @@
   public static FieldAdjustAction fieldAdjustAction(int rindex, String fname) {
     return new FieldAdjustAction(rindex, fname);
   }
-  
+
   public static class FieldAdjustAction extends ImplicitAction {
     public final int rindex;
     public final String fname;
@@ -533,7 +533,7 @@
       this.fname = fname;
     }
   }
-  
+
   public static FieldOrderAction fieldOrderAction(Schema.Field[] fields) {
     return new FieldOrderAction(fields);
   }
@@ -567,13 +567,13 @@
       this.rindex = rindex;
       this.symToParse = symToParse;
     }
-    
+
     @Override
     public UnionAdjustAction flatten(Map<Sequence, Sequence> map,
         Map<Sequence, List<Fixup>> map2) {
       return new UnionAdjustAction(rindex, symToParse.flatten(map, map2));
     }
-    
+
   }
 
   /** For JSON. */
@@ -587,11 +587,11 @@
       super(symbols.size());
       this.symbols = symbols;
     }
-    
+
     public String getLabel(int n) {
       return symbols.get(n);
     }
-    
+
     public int findLabel(String l) {
       if (l != null) {
         for (int i = 0; i < symbols.size(); i++) {
@@ -633,7 +633,7 @@
   public static final Symbol RECORD_END = new ImplicitAction(true);
   public static final Symbol UNION_END = new ImplicitAction(true);
   public static final Symbol FIELD_END = new ImplicitAction(true);
-  
+
   public static final Symbol DEFAULT_END_ACTION = new ImplicitAction(true);
   public static final Symbol MAP_KEY_MARKER =
     new Symbol.Terminal("map-key-marker");
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ValidatingGrammarGenerator.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ValidatingGrammarGenerator.java
index 6bd1b26..962fc10 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ValidatingGrammarGenerator.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ValidatingGrammarGenerator.java
@@ -101,7 +101,7 @@
       List<Schema> subs = sc.getTypes();
       Symbol[] symbols = new Symbol[subs.size()];
       String[] labels = new String[subs.size()];
-      
+
       int i = 0;
       for (Schema b : sc.getTypes()) {
         symbols[i] = generate(b, seen);
@@ -119,7 +119,7 @@
   static class LitS {
     public final Schema actual;
     public LitS(Schema actual) { this.actual = actual; }
-    
+
     /**
      * Two LitS are equal if and only if their underlying schema is
      * the same (not merely equal).
@@ -128,7 +128,7 @@
       if (! (o instanceof LitS)) return false;
       return actual == ((LitS)o).actual;
     }
-    
+
     public int hashCode() {
       return actual.hashCode();
     }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroAlias.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroAlias.java
index bcd0fd1..d613350 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroAlias.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroAlias.java
@@ -25,7 +25,7 @@
 /**
  * Adds the given name and space as an alias to the schema.
  * Avro files of this schema can be read into classes
- * named by the alias. 
+ * named by the alias.
  */
 @Retention(RetentionPolicy.RUNTIME)
 @Target(ElementType.TYPE)
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/CustomEncoding.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/CustomEncoding.java
index 1c180e5..66421c5 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/CustomEncoding.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/CustomEncoding.java
@@ -25,10 +25,10 @@
 
 /**
  * Expert:  a custom encoder and decoder that writes
- * an object directly to avro. 
+ * an object directly to avro.
  * No validation is performed to check that the encoding conforms to the schema.
  * Invalid implementations may result in an unreadable file.
- * The use of {@link org.apache.avro.io.ValidatingEncoder} is recommended. 
+ * The use of {@link org.apache.avro.io.ValidatingEncoder} is recommended.
  *
  * @param <T> The class of objects that can be serialized with this encoder / decoder.
  */
@@ -36,7 +36,7 @@
 
   protected Schema schema;
 
-  
+
   protected abstract void write(Object datum, Encoder out) throws IOException;
 
   protected abstract T read(Object reuse, Decoder in) throws IOException;
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccess.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccess.java
index ba739bc..ffc6f3c 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccess.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccess.java
@@ -20,7 +20,7 @@
 import java.lang.reflect.Field;
 
 abstract class FieldAccess {
-  
+
   protected abstract FieldAccessor getAccessor(Field field);
 
 }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessReflect.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessReflect.java
index 680139a..fd544ec 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessReflect.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessReflect.java
@@ -47,7 +47,7 @@
       this.field = field;
       this.field.setAccessible(true);
       isStringable = field.isAnnotationPresent(Stringable.class);
-      isCustomEncoded = field.isAnnotationPresent(AvroEncode.class); 
+      isCustomEncoded = field.isAnnotationPresent(AvroEncode.class);
     }
 
     @Override
@@ -65,17 +65,17 @@
         IOException {
       field.set(object, value);
     }
-    
+
     @Override
     protected Field getField() {
       return field;
     }
-    
+
     @Override
     protected boolean isStringable() {
       return isStringable;
     }
-    
+
     @Override
     protected boolean isCustomEncoded() {
       return isCustomEncoded;
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessUnsafe.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessUnsafe.java
index b5ade1e..3ae25fd 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessUnsafe.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessUnsafe.java
@@ -92,7 +92,7 @@
     protected boolean supportsIO() {
       return true;
     }
-    
+
     @Override
     protected boolean isStringable() {
       return isStringable;
@@ -321,18 +321,18 @@
     protected Object get(Object object) {
       return UNSAFE.getObject(object, offset);
     }
-    
+
     @Override
     protected boolean supportsIO() {
       return false;
     }
-    
+
   }
-  
+
   final static class UnsafeCustomEncodedField extends UnsafeCachedField {
 
     private CustomEncoding<?> encoding;
-    
+
     UnsafeCustomEncodedField(Field f, CustomEncoding<?> encoding) {
       super(f);
       this.encoding = encoding;
@@ -347,7 +347,7 @@
     protected void set(Object object, Object value) throws IllegalAccessException, IOException {
       UNSAFE.putObject(object, offset, value);
     }
-    
+
     @Override
     protected void read(Object object, Decoder in) throws IOException {
       UNSAFE.putObject(object, offset, encoding.read(in));
@@ -357,7 +357,7 @@
     protected void write(Object object, Encoder out) throws IOException {
       encoding.write(UNSAFE.getObject(object, offset), out);
     }
-    
+
     protected boolean isCustomEncoded() {
       return true;
     }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessor.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessor.java
index b76d185..9d71147 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessor.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessor.java
@@ -41,15 +41,15 @@
   protected boolean supportsIO() {
     return false;
   }
-  
+
   protected abstract Field getField();
-  
+
   protected boolean isStringable() {
     return false;
   }
-  
+
   protected boolean isCustomEncoded() {
     return false;
   }
-  
+
 }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectData.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectData.java
index 5af55b7..6b6ae4e 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectData.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectData.java
@@ -87,17 +87,17 @@
       return makeNullable(schema);
     }
   }
-  
+
   private static final ReflectData INSTANCE = new ReflectData();
 
   /** For subclasses.  Applications normally use {@link ReflectData#get()}. */
   public ReflectData() {}
-  
+
   /** Construct with a particular classloader. */
   public ReflectData(ClassLoader classLoader) {
     super(classLoader);
   }
-  
+
   /** Return the singleton instance. */
   public static ReflectData get() { return INSTANCE; }
 
@@ -148,7 +148,7 @@
   public Object getField(Object record, String name, int position) {
     return getField(record, name, position, null);
   }
-  
+
   @Override
   protected Object getField(Object record, String name, int pos, Object state) {
     if (record instanceof IndexedRecord) {
@@ -160,7 +160,7 @@
       throw new AvroRuntimeException(e);
     }
   }
-    
+
   private FieldAccessor getAccessorForField(Object record, String name,
       int pos, Object optionalState) {
     if (optionalState != null) {
@@ -227,8 +227,8 @@
       return super.validate(schema, datum);
     }
   }
-  
-  static final ConcurrentHashMap<Class<?>, ClassAccessorData> 
+
+  static final ConcurrentHashMap<Class<?>, ClassAccessorData>
     ACCESSOR_CACHE = new ConcurrentHashMap<Class<?>, ClassAccessorData>();
 
   static class ClassAccessorData {
@@ -244,14 +244,14 @@
           continue;
         }
         FieldAccessor accessor = ReflectionUtil.getFieldAccess().getAccessor(f);
-        AvroName avroname = f.getAnnotation(AvroName.class);    
-        byName.put( (avroname != null 
+        AvroName avroname = f.getAnnotation(AvroName.class);
+        byName.put( (avroname != null
           ? avroname.value()
-          : f.getName()) , accessor);  
+          : f.getName()) , accessor);
       }
     }
-    
-    /** 
+
+    /**
      * Return the field accessors as an array, indexed by the field
      * index of the given schema.
      */
@@ -282,7 +282,7 @@
       return result;
     }
   }
-  
+
   private ClassAccessorData getClassAccessorData(Class<?> c) {
     ClassAccessorData data = ACCESSOR_CACHE.get(c);
     if(data == null && !IndexedRecord.class.isAssignableFrom(c)){
@@ -294,7 +294,7 @@
     }
     return data;
   }
-  
+
   private FieldAccessor[] getFieldAccessors(Class<?> c, Schema s) {
     ClassAccessorData data = getClassAccessorData(c);
     if (data != null) {
@@ -302,7 +302,7 @@
     }
     return null;
   }
-  
+
   private FieldAccessor getFieldAccessor(Class<?> c, String fieldName) {
     ClassAccessorData data = getClassAccessorData(c);
     if (data != null) {
@@ -431,9 +431,9 @@
                                   Map<String, Schema> names) {
     Schema keySchema = createSchema(keyType, names);
     Schema valueSchema = createSchema(valueType, names);
-    Schema.Field keyField = 
+    Schema.Field keyField =
       new Schema.Field(NS_MAP_KEY, keySchema, null, null);
-    Schema.Field valueField = 
+    Schema.Field valueField =
       new Schema.Field(NS_MAP_VALUE, valueSchema, null, null);
     String name = getNameForNonStringMapRecord(keyType, valueType,
       keySchema, valueSchema);
@@ -493,7 +493,7 @@
     if (type instanceof GenericArrayType) {                  // generic array
       Type component = ((GenericArrayType)type).getGenericComponentType();
       if (component == Byte.TYPE)                            // byte array
-        return Schema.create(Schema.Type.BYTES);           
+        return Schema.create(Schema.Type.BYTES);
       Schema result = Schema.createArray(createSchema(component, names));
       setElement(result, component);
       return result;
@@ -534,10 +534,10 @@
     } else if (type instanceof Class) {                      // Class
       Class<?> c = (Class<?>)type;
       if (c.isPrimitive() ||                                 // primitives
-          c == Void.class || c == Boolean.class || 
+          c == Void.class || c == Boolean.class ||
           c == Integer.class || c == Long.class ||
-          c == Float.class || c == Double.class || 
-          c == Byte.class || c == Short.class || 
+          c == Float.class || c == Double.class ||
+          c == Byte.class || c == Short.class ||
           c == Character.class)
         return super.createSchema(type, names);
       if (c.isArray()) {                                     // array
@@ -599,7 +599,7 @@
           consumeAvroAliasAnnotation(c, schema);
           names.put(c.getName(), schema);
           for (Field field : getCachedFields(c))
-            if ((field.getModifiers()&(Modifier.TRANSIENT|Modifier.STATIC))==0 
+            if ((field.getModifiers()&(Modifier.TRANSIENT|Modifier.STATIC))==0
                 && !field.isAnnotationPresent(AvroIgnore.class)) {
               Schema fieldSchema = createFieldSchema(field, names);
               AvroDefault defaultAnnotation
@@ -607,7 +607,7 @@
               JsonNode defaultValue = (defaultAnnotation == null)
                 ? null
                 : Schema.parseJson(defaultAnnotation.value());
-              
+
               if (defaultValue == null
                   && fieldSchema.getType() == Schema.Type.UNION) {
                 Schema defaultType = fieldSchema.getTypes().get(0);
@@ -616,17 +616,17 @@
                 }
               }
               AvroName annotatedName = field.getAnnotation(AvroName.class);       // Rename fields
-              String fieldName = (annotatedName != null)            
+              String fieldName = (annotatedName != null)
                 ? annotatedName.value()
                 : field.getName();
-              Schema.Field recordField 
+              Schema.Field recordField
                 = new Schema.Field(fieldName, fieldSchema, null, defaultValue);
-             
+
               AvroMeta meta = field.getAnnotation(AvroMeta.class);              // add metadata
-              if (meta != null) 
-                recordField.addProp(meta.key(), meta.value());  
-              for(Schema.Field f : fields) {                                
-                if (f.name().equals(fieldName)) 
+              if (meta != null)
+                recordField.addProp(meta.key(), meta.value());
+              for(Schema.Field f : fields) {
+                if (f.name().equals(fieldName))
                   throw new AvroTypeException("double field entry: "+ fieldName);
               }
               fields.add(recordField);
@@ -636,7 +636,7 @@
                                         null, null));
           schema.setFields(fields);
           AvroMeta meta = c.getAnnotation(AvroMeta.class);
-          if (meta != null) 
+          if (meta != null)
               schema.addProp(meta.key(), meta.value());
         }
         names.put(fullName, schema);
@@ -654,7 +654,7 @@
     makeNullable(Schema.create(Schema.Type.STRING));
 
   // if array element type is a class with a union annotation, note it
-  // this is required because we cannot set a property on the union itself 
+  // this is required because we cannot set a property on the union itself
   private void setElement(Schema schema, Type element) {
     if (!(element instanceof Class)) return;
     Class<?> c = (Class<?>)element;
@@ -694,7 +694,7 @@
 
   private static final Map<Class<?>,Field[]> FIELDS_CACHE =
     new ConcurrentHashMap<Class<?>,Field[]>();
-  
+
   // Return of this class and its superclasses to serialize.
   private static Field[] getCachedFields(Class<?> recordClass) {
     Field[] fieldsList = FIELDS_CACHE.get(recordClass);
@@ -722,7 +722,7 @@
     fieldsList = fields.values().toArray(new Field[0]);
     return fieldsList;
   }
-  
+
   /** Create a schema for a field. */
   protected Schema createFieldSchema(Field field, Map<String, Schema> names) {
     AvroEncode enc = field.getAnnotation(AvroEncode.class);
@@ -731,7 +731,7 @@
           return enc.using().newInstance().getSchema();
       } catch (Exception e) {
           throw new AvroRuntimeException("Could not create schema from custom serializer for " + field.getName());
-      } 
+      }
 
     AvroSchema explicit = field.getAnnotation(AvroSchema.class);
     if (explicit != null)                                   // explicit schema
@@ -816,7 +816,7 @@
     List<Schema> errs = new ArrayList<Schema>();
     errs.add(Protocol.SYSTEM_ERROR);              // every method can throw
     for (Type err : method.getGenericExceptionTypes())
-      if (err != AvroRemoteException.class) 
+      if (err != AvroRemoteException.class)
         errs.add(getSchema(err, names));
     Schema errors = Schema.createUnion(errs);
     return protocol.createMessage(method.getName(), null /* doc */, request, response, errors);
@@ -851,8 +851,8 @@
     case BYTES:
       if (!o1.getClass().isArray())
         break;
-      byte[] b1 = (byte[])o1; 
-      byte[] b2 = (byte[])o2; 
+      byte[] b1 = (byte[])o1;
+      byte[] b2 = (byte[])o2;
       return BinaryData.compareBytes(b1, 0, b1.length, b2, 0, b2.length);
     }
     return super.compare(o1, o2, s, equals);
@@ -862,7 +862,7 @@
   protected Object getRecordState(Object record, Schema schema) {
     return getFieldAccessors(record.getClass(), schema);
   }
-  
+
   private void consumeAvroAliasAnnotation(Class<?> c, Schema schema) {
     AvroAlias alias = c.getAnnotation(AvroAlias.class);
     if (alias != null) {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumReader.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumReader.java
index ee12180..3d5a301 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumReader.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumReader.java
@@ -275,13 +275,13 @@
         if (accessor.isStringable()) {
           try {
             String asString = (String) read(null, f.schema(), in);
-            accessor.set(record, asString == null 
+            accessor.set(record, asString == null
               ? null
               : newInstanceFromString(accessor.getField().getType(), asString));
             return;
           } catch (Exception e) {
             throw new AvroRuntimeException("Failed to read Stringable", e);
-          } 
+          }
         }
         LogicalType logicalType = f.schema().getLogicalType();
         if (logicalType != null) {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumWriter.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumWriter.java
index 85e3c55..fbdb9a5 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumWriter.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumWriter.java
@@ -51,7 +51,7 @@
   public ReflectDatumWriter(Schema root, ReflectData reflectData) {
     super(root, reflectData);
   }
-  
+
   protected ReflectDatumWriter(ReflectData reflectData) {
     super(reflectData);
   }
@@ -69,7 +69,7 @@
     if (null == elementClass) {
       // not a Collection or an Array
       throw new AvroRuntimeException("Array data must be a Collection or Array");
-    } 
+    }
     Schema element = schema.getElementType();
     if (elementClass.isPrimitive()) {
       Schema.Type type = element.getType();
@@ -109,7 +109,7 @@
       out.writeArrayEnd();
     }
   }
-  
+
   private void writeObjectArray(Schema element, Object[] data, Encoder out) throws IOException {
     int size = data.length;
     out.setItemCount(size);
@@ -117,12 +117,12 @@
       this.write(element, data[i], out);
     }
   }
-    
+
   private void arrayError(Class<?> cl, Schema.Type type) {
     throw new AvroRuntimeException("Error writing array with inner type " +
       cl + " and avro type: " + type);
   }
-  
+
   @Override
   protected void writeBytes(Object datum, Encoder out) throws IOException {
     if (datum instanceof byte[])
@@ -176,7 +176,7 @@
             throw new AvroRuntimeException("Failed to write Stringable", e);
           }
           return;
-        }  
+        }
       }
     }
     super.writeField(record, f, out, state);
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectionUtil.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectionUtil.java
index 1457cdb..236de41 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectionUtil.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectionUtil.java
@@ -22,10 +22,10 @@
 /**
  * A few utility methods for using @link{java.misc.Unsafe}, mostly for private
  * use.
- * 
+ *
  * Use of Unsafe on Android is forbidden, as Android provides only a very
  * limited functionality for this class compared to the JDK version.
- * 
+ *
  */
 
 class ReflectionUtil {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/AvroGenerated.java b/lang/java/avro/src/main/java/org/apache/avro/specific/AvroGenerated.java
index e57d27e..d040431 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/specific/AvroGenerated.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/AvroGenerated.java
@@ -23,8 +23,8 @@
 import java.lang.annotation.Target;
 
 /**
- * Indicates that an annotated class is an Avro generated class. All Avro 
- * generated classes will be annotated with this annotation. 
+ * Indicates that an annotated class is an Avro generated class. All Avro
+ * generated classes will be annotated with this annotation.
  */
 @Target(ElementType.TYPE)
 @Retention(RetentionPolicy.RUNTIME)
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificData.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificData.java
index 647f823..ec2e33f 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificData.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificData.java
@@ -50,7 +50,7 @@
 public class SpecificData extends GenericData {
 
   private static final SpecificData INSTANCE = new SpecificData();
-  
+
   private static final Class<?>[] NO_ARG = new Class[]{};
   private static final Class<?>[] SCHEMA_ARG = new Class[]{Schema.class};
   private static final Map<Class,Constructor> CTOR_CACHE =
@@ -101,7 +101,7 @@
   public SpecificData(ClassLoader classLoader) {
     super(classLoader);
   }
-  
+
   @Override
   public DatumReader createDatumReader(Schema schema) {
     return new SpecificDatumReader(schema, schema, this);
@@ -293,7 +293,7 @@
     return super.getSchemaName(datum);
   }
 
-  /** True iff a class should be serialized with toString(). */ 
+  /** True iff a class should be serialized with toString(). */
   protected boolean isStringable(Class<?> c) {
     return stringableClasses.contains(c);
   }
@@ -324,7 +324,7 @@
       return super.compare(o1, o2, s, eq);
     }
   }
-  
+
   /** Create an instance of a class.  If the class implements {@link
    * SchemaConstructable}, call a constructor with a {@link
    * org.apache.avro.Schema} parameter, otherwise use a no-arg constructor. */
@@ -345,14 +345,14 @@
     }
     return result;
   }
-  
+
   @Override
   public Object createFixed(Object old, Schema schema) {
     Class c = getClass(schema);
     if (c == null) return super.createFixed(old, schema); // punt to generic
     return c.isInstance(old) ? old : newInstance(c, schema);
   }
-  
+
   @Override
   public Object newRecord(Object old, Schema schema) {
     Class c = getClass(schema);
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumReader.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumReader.java
index ceffdd6..870d16f 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumReader.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumReader.java
@@ -79,7 +79,7 @@
     case STRING:
       stringClass = getPropAsClass(schema, SpecificData.CLASS_PROP);
       break;
-    case MAP: 
+    case MAP:
       stringClass = getPropAsClass(schema, SpecificData.KEY_CLASS_PROP);
       break;
     }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumWriter.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumWriter.java
index 49dddbb..128b02e 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumWriter.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumWriter.java
@@ -32,19 +32,19 @@
   public SpecificDatumWriter(Class<T> c) {
     super(SpecificData.get().getSchema(c), SpecificData.get());
   }
-  
+
   public SpecificDatumWriter(Schema schema) {
     super(schema, SpecificData.get());
   }
-  
+
   public SpecificDatumWriter(Schema root, SpecificData specificData) {
     super(root, specificData);
   }
-  
+
   protected SpecificDatumWriter(SpecificData specificData) {
     super(specificData);
   }
-  
+
   /** Returns the {@link SpecificData} implementation used by this writer. */
   public SpecificData getSpecificData() {
     return (SpecificData) getData();
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificErrorBuilderBase.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificErrorBuilderBase.java
index f95f42c..961f030 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificErrorBuilderBase.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificErrorBuilderBase.java
@@ -23,18 +23,18 @@
 import org.apache.avro.data.ErrorBuilder;
 import org.apache.avro.data.RecordBuilderBase;
 
-/** 
+/**
  * Abstract base class for specific ErrorBuilder implementations.
  * Not thread-safe.
  */
-abstract public class SpecificErrorBuilderBase<T extends SpecificExceptionBase> 
+abstract public class SpecificErrorBuilderBase<T extends SpecificExceptionBase>
   extends RecordBuilderBase<T> implements ErrorBuilder<T> {
   private Constructor<T> errorConstructor;
   private Object value;
   private boolean hasValue;
   private Throwable cause;
   private boolean hasCause;
-  
+
   /**
    * Creates a SpecificErrorBuilderBase for building errors of the given type.
    * @param schema the schema associated with the error class.
@@ -42,7 +42,7 @@
   protected SpecificErrorBuilderBase(Schema schema) {
     super(schema, SpecificData.get());
   }
-  
+
   /**
    * SpecificErrorBuilderBase copy constructor.
    * @param other SpecificErrorBuilderBase instance to copy.
@@ -55,19 +55,19 @@
     this.cause = other.cause;
     this.hasCause = other.hasCause;
   }
-  
+
   /**
    * Creates a SpecificErrorBuilderBase by copying an existing error instance.
    * @param other the error instance to copy.
    */
   protected SpecificErrorBuilderBase(T other) {
     super(other.getSchema(), SpecificData.get());
-    
+
     Object otherValue = other.getValue();
     if (otherValue != null) {
       setValue(otherValue);
     }
-    
+
     Throwable otherCause = other.getCause();
     if (otherCause != null) {
       setCause(otherCause);
@@ -85,12 +85,12 @@
     hasValue = true;
     return this;
   }
-  
+
   @Override
   public boolean hasValue() {
     return hasValue;
   }
-  
+
   @Override
   public SpecificErrorBuilderBase<T> clearValue() {
     value = null;
@@ -109,12 +109,12 @@
     hasCause = true;
     return this;
   }
-  
+
   @Override
   public boolean hasCause() {
     return hasCause;
   }
-  
+
   @Override
   public SpecificErrorBuilderBase<T> clearCause() {
     cause = null;
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificExceptionBase.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificExceptionBase.java
index bf07e30..86d0a69 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificExceptionBase.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificExceptionBase.java
@@ -33,7 +33,7 @@
   public SpecificExceptionBase() {
     super();
   }
-  
+
   public SpecificExceptionBase(Throwable value) {
     super(value);
   }
@@ -41,7 +41,7 @@
   public SpecificExceptionBase(Object value) {
     super(value);
   }
-  
+
   public SpecificExceptionBase(Object value, Throwable cause) {
     super(value, cause);
   }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificFixed.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificFixed.java
index 9ff9093..0802b0d 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificFixed.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificFixed.java
@@ -35,9 +35,9 @@
   public SpecificFixed() {
     bytes(new byte[getSchema().getFixedSize()]);
   }
-  
+
   public SpecificFixed(byte[] bytes) {
-    bytes(bytes); 
+    bytes(bytes);
   }
 
   public void bytes(byte[] bytes) { this.bytes = bytes; }
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java
index 51ee653..77d0928 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java
@@ -51,7 +51,7 @@
     if (this.getClass() != that.getClass()) return false; // not same schema
     return SpecificData.get().compare(this, that, this.getSchema(), true) == 0;
   }
-    
+
   @Override
   public int hashCode() {
     return SpecificData.get().hashCode(this, this.getSchema());
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBuilderBase.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBuilderBase.java
index d508dce..d157434 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBuilderBase.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBuilderBase.java
@@ -24,9 +24,9 @@
  * Abstract base class for specific RecordBuilder implementations.
  * Not thread-safe.
  */
-abstract public class SpecificRecordBuilderBase<T extends SpecificRecord> 
+abstract public class SpecificRecordBuilderBase<T extends SpecificRecord>
   extends RecordBuilderBase<T> {
-  
+
   /**
    * Creates a SpecificRecordBuilderBase for building records of the given type.
    * @param schema the schema associated with the record class.
@@ -34,7 +34,7 @@
   protected SpecificRecordBuilderBase(Schema schema) {
     super(schema, SpecificData.get());
   }
-  
+
   /**
    * SpecificRecordBuilderBase copy constructor.
    * @param other SpecificRecordBuilderBase instance to copy.
@@ -42,7 +42,7 @@
   protected SpecificRecordBuilderBase(SpecificRecordBuilderBase<T> other) {
     super(other, SpecificData.get());
   }
-  
+
   /**
    * Creates a SpecificRecordBuilderBase by copying an existing record instance.
    * @param other the record instance to copy.
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferOutputStream.java b/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferOutputStream.java
index 2feb699..498a6d4 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferOutputStream.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferOutputStream.java
@@ -51,7 +51,7 @@
     }
     buffers.addAll(0, lists);
   }
-  
+
   /** Append a list of ByteBuffers to this stream. */
   public void append(List<ByteBuffer> lists) {
     for (ByteBuffer buffer: lists) {
@@ -59,7 +59,7 @@
     }
     buffers.addAll(lists);
   }
-  
+
   public void reset() {
     buffers = new LinkedList<ByteBuffer>();
     buffers.add(ByteBuffer.allocate(BUFFER_SIZE));
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/Utf8.java b/lang/java/avro/src/main/java/org/apache/avro/util/Utf8.java
index 061d160..27d11a3 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/util/Utf8.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/Utf8.java
@@ -40,7 +40,7 @@
     this.length = bytes.length;
     this.string = string;
   }
-  
+
   public Utf8(Utf8 other) {
     this.length = other.length;
     this.bytes = new byte[other.length];
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java b/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java
index d65227c..a22708a 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java
@@ -31,14 +31,14 @@
  * Implements a combination of WeakHashMap and IdentityHashMap.
  * Useful for caches that need to key off of a == comparison
  * instead of a .equals.
- * 
+ *
  * <b>
  * This class is not a general-purpose Map implementation! While
  * this class implements the Map interface, it intentionally violates
  * Map's general contract, which mandates the use of the equals method
  * when comparing objects. This class is designed for use only in the
  * rare cases wherein reference-equality semantics are required.
- * 
+ *
  * Note that this implementation is not synchronized.
  * </b>
  */
@@ -144,7 +144,7 @@
 
   class IdentityWeakReference extends WeakReference<K> {
     int hash;
-        
+
     @SuppressWarnings("unchecked")
       IdentityWeakReference(Object obj) {
       super((K)obj, queue);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/AvroTestUtil.java b/lang/java/avro/src/test/java/org/apache/avro/AvroTestUtil.java
index e920e46..c1d2df6 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/AvroTestUtil.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/AvroTestUtil.java
@@ -26,11 +26,11 @@
   private AvroTestUtil() {
   }
 
-  /** 
+  /**
    * Create a temporary file in a test-appropriate directory.
-   * 
+   *
    * @param testClass The test case class requesting the file creation
-   * @param name The name of the file to be created 
+   * @param name The name of the file to be created
    */
   public static File tempFile(Class testClass, String name) {
     File testClassDir = new File(TMPDIR, testClass.getName());
@@ -38,11 +38,11 @@
     return new File(testClassDir, name);
   }
 
-  /** 
+  /**
    * Create a temporary directory in a test-appropriate directory.
-   * 
+   *
    * @param testClass The test case class requesting the directory creation
-   * @param name The name of the directory to be created  
+   * @param name The name of the directory to be created
    */
   public static File tempDirectory(Class testClass, String name) {
     File tmpFile = tempFile(testClass, name);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/FooBarSpecificRecord.java b/lang/java/avro/src/test/java/org/apache/avro/FooBarSpecificRecord.java
index d0846f1..babcd36 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/FooBarSpecificRecord.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/FooBarSpecificRecord.java
@@ -1,6 +1,6 @@
 /**
  * Autogenerated by Avro
- * 
+ *
  * DO NOT EDIT DIRECTLY
  *
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -19,7 +19,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.avro;  
+package org.apache.avro;
 @SuppressWarnings("all")
 @org.apache.avro.specific.AvroGenerated
 public class FooBarSpecificRecord extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
@@ -35,7 +35,7 @@
   /**
    * Default constructor.  Note that this does not initialize fields
    * to their default values from the schema.  If that is desired then
-   * one should use <code>newBuilder()</code>. 
+   * one should use <code>newBuilder()</code>.
    */
   public FooBarSpecificRecord() {}
 
@@ -51,7 +51,7 @@
   }
 
   public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter.  Applications should not call. 
+  // Used by DatumWriter.  Applications should not call.
   public java.lang.Object get(int field$) {
     switch (field$) {
     case 0: return id;
@@ -62,7 +62,7 @@
     default: throw new org.apache.avro.AvroRuntimeException("Bad index");
     }
   }
-  // Used by DatumReader.  Applications should not call. 
+  // Used by DatumReader.  Applications should not call.
   @SuppressWarnings(value="unchecked")
   public void put(int field$, java.lang.Object value$) {
     switch (field$) {
@@ -154,17 +154,17 @@
   public static org.apache.avro.FooBarSpecificRecord.Builder newBuilder() {
     return new org.apache.avro.FooBarSpecificRecord.Builder();
   }
-  
+
   /** Creates a new FooBarSpecificRecord RecordBuilder by copying an existing Builder */
   public static org.apache.avro.FooBarSpecificRecord.Builder newBuilder(org.apache.avro.FooBarSpecificRecord.Builder other) {
     return new org.apache.avro.FooBarSpecificRecord.Builder(other);
   }
-  
+
   /** Creates a new FooBarSpecificRecord RecordBuilder by copying an existing FooBarSpecificRecord instance */
   public static org.apache.avro.FooBarSpecificRecord.Builder newBuilder(org.apache.avro.FooBarSpecificRecord other) {
     return new org.apache.avro.FooBarSpecificRecord.Builder(other);
   }
-  
+
   /**
    * RecordBuilder for FooBarSpecificRecord instances.
    */
@@ -181,7 +181,7 @@
     private Builder() {
       super(org.apache.avro.FooBarSpecificRecord.SCHEMA$);
     }
-    
+
     /** Creates a Builder by copying an existing Builder */
     private Builder(org.apache.avro.FooBarSpecificRecord.Builder other) {
       super(other);
@@ -206,7 +206,7 @@
         fieldSetFlags()[4] = true;
       }
     }
-    
+
     /** Creates a Builder by copying an existing FooBarSpecificRecord instance */
     private Builder(org.apache.avro.FooBarSpecificRecord other) {
             super(org.apache.avro.FooBarSpecificRecord.SCHEMA$);
@@ -236,20 +236,20 @@
     public java.lang.Integer getId() {
       return id;
     }
-    
+
     /** Sets the value of the 'id' field */
     public org.apache.avro.FooBarSpecificRecord.Builder setId(int value) {
       validate(fields()[0], value);
       this.id = value;
       fieldSetFlags()[0] = true;
-      return this; 
+      return this;
     }
-    
+
     /** Checks whether the 'id' field has been set */
     public boolean hasId() {
       return fieldSetFlags()[0];
     }
-    
+
     /** Clears the value of the 'id' field */
     public org.apache.avro.FooBarSpecificRecord.Builder clearId() {
       fieldSetFlags()[0] = false;
@@ -260,20 +260,20 @@
     public java.lang.String getName() {
       return name;
     }
-    
+
     /** Sets the value of the 'name' field */
     public org.apache.avro.FooBarSpecificRecord.Builder setName(java.lang.String value) {
       validate(fields()[1], value);
       this.name = value;
       fieldSetFlags()[1] = true;
-      return this; 
+      return this;
     }
-    
+
     /** Checks whether the 'name' field has been set */
     public boolean hasName() {
       return fieldSetFlags()[1];
     }
-    
+
     /** Clears the value of the 'name' field */
     public org.apache.avro.FooBarSpecificRecord.Builder clearName() {
       name = null;
@@ -285,20 +285,20 @@
     public java.util.List<java.lang.String> getNicknames() {
       return nicknames;
     }
-    
+
     /** Sets the value of the 'nicknames' field */
     public org.apache.avro.FooBarSpecificRecord.Builder setNicknames(java.util.List<java.lang.String> value) {
       validate(fields()[2], value);
       this.nicknames = value;
       fieldSetFlags()[2] = true;
-      return this; 
+      return this;
     }
-    
+
     /** Checks whether the 'nicknames' field has been set */
     public boolean hasNicknames() {
       return fieldSetFlags()[2];
     }
-    
+
     /** Clears the value of the 'nicknames' field */
     public org.apache.avro.FooBarSpecificRecord.Builder clearNicknames() {
       nicknames = null;
@@ -310,20 +310,20 @@
     public java.util.List<java.lang.Integer> getRelatedids() {
       return relatedids;
     }
-    
+
     /** Sets the value of the 'relatedids' field */
     public org.apache.avro.FooBarSpecificRecord.Builder setRelatedids(java.util.List<java.lang.Integer> value) {
       validate(fields()[3], value);
       this.relatedids = value;
       fieldSetFlags()[3] = true;
-      return this; 
+      return this;
     }
-    
+
     /** Checks whether the 'relatedids' field has been set */
     public boolean hasRelatedids() {
       return fieldSetFlags()[3];
     }
-    
+
     /** Clears the value of the 'relatedids' field */
     public org.apache.avro.FooBarSpecificRecord.Builder clearRelatedids() {
       relatedids = null;
@@ -335,20 +335,20 @@
     public org.apache.avro.TypeEnum getTypeEnum() {
       return typeEnum;
     }
-    
+
     /** Sets the value of the 'typeEnum' field */
     public org.apache.avro.FooBarSpecificRecord.Builder setTypeEnum(org.apache.avro.TypeEnum value) {
       validate(fields()[4], value);
       this.typeEnum = value;
       fieldSetFlags()[4] = true;
-      return this; 
+      return this;
     }
-    
+
     /** Checks whether the 'typeEnum' field has been set */
     public boolean hasTypeEnum() {
       return fieldSetFlags()[4];
     }
-    
+
     /** Clears the value of the 'typeEnum' field */
     public org.apache.avro.FooBarSpecificRecord.Builder clearTypeEnum() {
       typeEnum = null;
@@ -373,7 +373,7 @@
   }
 
   private static final org.apache.avro.io.DatumWriter
-    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);  
+    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);
 
   @Override public void writeExternal(java.io.ObjectOutput out)
     throws java.io.IOException {
@@ -381,7 +381,7 @@
   }
 
   private static final org.apache.avro.io.DatumReader
-    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);  
+    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);
 
   @Override public void readExternal(java.io.ObjectInput in)
     throws java.io.IOException {
diff --git a/lang/java/avro/src/test/java/org/apache/avro/GenerateBlockingData.java b/lang/java/avro/src/test/java/org/apache/avro/GenerateBlockingData.java
index 72a632b..1e9f895 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/GenerateBlockingData.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/GenerateBlockingData.java
@@ -39,7 +39,7 @@
   private static final int SYNC_INTERVAL = 1000;
   private static ByteArrayOutputStream buffer =
                       new ByteArrayOutputStream(2*SYNC_INTERVAL);
-  
+
   private static EncoderFactory factory = EncoderFactory.get();
   private static Encoder bufOut = EncoderFactory.get().blockingBinaryEncoder(
       buffer, null);
@@ -53,24 +53,24 @@
     buffer.reset();
     blockCount = 0;
   }
-  
+
   public static void main(String[] args) throws Exception {
     if(args.length != 3) {
       System.out.println(
           "Usage: GenerateBlockingData <schemafile> <outputfile> <count>");
       System.exit(-1);
     }
-    
+
     Schema sch = Schema.parse(new File(args[0]));
     File outputFile = new File(args[1]);
     int numObjects = Integer.parseInt(args[2]);
-    
+
     FileOutputStream out = new FileOutputStream(outputFile, false);
     DatumWriter<Object> dout = new GenericDatumWriter<Object>();
     dout.setSchema(sch);
     Encoder vout = factory.directBinaryEncoder(out, null);
     vout.writeLong(numObjects); // metadata:the count of objects in the file
-    
+
     for (Object datum : new RandomData(sch, numObjects)) {
       dout.write(datum, bufOut);
       blockCount++;
diff --git a/lang/java/avro/src/test/java/org/apache/avro/RandomData.java b/lang/java/avro/src/test/java/org/apache/avro/RandomData.java
index c92a980..2dad518 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/RandomData.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/RandomData.java
@@ -48,7 +48,7 @@
     this.seed = seed;
     this.count = count;
   }
-  
+
   public Iterator<Object> iterator() {
     return new Iterator<Object>() {
       private int n;
@@ -61,7 +61,7 @@
       public void remove() { throw new UnsupportedOperationException(); }
     };
   }
-  
+
   @SuppressWarnings(value="unchecked")
   private static Object generate(Schema schema, Random random, int d) {
     switch (schema.getType()) {
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
index 21c99d8..e067496 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
@@ -252,7 +252,7 @@
     } finally {
       reader.close();
     }
-  }  
+  }
 
   public void testReadWithHeader() throws IOException {
     File file = makeFile();
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileConcat.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileConcat.java
index e806857..b30c7b1 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileConcat.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileConcat.java
@@ -48,7 +48,7 @@
     this.codec = codec;
     this.codec2 = codec2;
     this.recompress = recompress;
-    LOG.info("Testing concatenating files, " + codec2 + " into " + codec + 
+    LOG.info("Testing concatenating files, " + codec2 + " into " + codec +
         " with recompress=" + recompress);
   }
 
@@ -134,7 +134,7 @@
       } finally {
         writer2.close();
       }
-      DataFileWriter<Object> concatinto = 
+      DataFileWriter<Object> concatinto =
         new DataFileWriter<Object>(new GenericDatumWriter<Object>())
         .setSyncInterval(syncInterval);
       concatinto.appendTo(file1);
@@ -180,5 +180,5 @@
 
     }
   }
-  
+
 }
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCustomSync.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCustomSync.java
index 9270e8c..e3f3791 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCustomSync.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCustomSync.java
@@ -64,7 +64,7 @@
     } catch (NoSuchAlgorithmException e) {
       throw new RuntimeException(e);
     }
-  }  
+  }
 
   @Test(expected = IOException.class)
   public void testInvalidSync() throws IOException {
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestProtocol.java b/lang/java/avro/src/test/java/org/apache/avro/TestProtocol.java
index 007c273..295ee38 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestProtocol.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestProtocol.java
@@ -32,19 +32,19 @@
     p2.addProp("a","2");
     assertFalse(p1.equals(p2));
   }
-  
+
   @Test
   public void testSplitProtocolBuild() {
     Protocol p = new Protocol("P", null, "foo");
     p.addProp("property", "some value");
-     
+
     String protocolString = p.toString();
     final int mid = protocolString.length() / 2;
     String[] parts = {
       protocolString.substring(0, mid),
       protocolString.substring(mid),
-    }; 
-    
+    };
+
     Protocol parsedStringProtocol = org.apache.avro.Protocol.parse(protocolString);
     Protocol parsedArrayOfStringProtocol =
       org.apache.avro.Protocol.parse(protocolString.substring(0, mid),
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchema.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchema.java
index ba2cab4..242ee8c 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestSchema.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchema.java
@@ -29,7 +29,7 @@
 import org.apache.avro.Schema.Type;
 import org.junit.Test;
 
-public class TestSchema {  
+public class TestSchema {
   @Test
   public void testSplitSchemaBuild() {
     Schema s = SchemaBuilder
@@ -38,10 +38,10 @@
          .name("clientProtocol").type().optional().stringType()
          .name("meta").type().optional().map().values().bytesType()
          .endRecord();
-    
+
     String schemaString = s.toString();
     final int mid = schemaString.length() / 2;
-    
+
     Schema parsedStringSchema = new org.apache.avro.Schema.Parser().parse(s.toString());
     Schema parsedArrayOfStringSchema =
       new org.apache.avro.Schema.Parser().parse
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaBuilder.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaBuilder.java
index 58a1a6f..70dc1e5 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaBuilder.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaBuilder.java
@@ -72,13 +72,13 @@
     Assert.assertEquals(new Schema.Field("f2", optional, null, true),
         fields.get(2));
   }
-  
+
   @Test
   public void testDoc() {
     Schema s = SchemaBuilder.fixed("myfixed").doc("mydoc").size(1);
     Assert.assertEquals("mydoc", s.getDoc());
   }
-  
+
   @Test
   public void testProps() {
     Schema s = SchemaBuilder.builder().intBuilder()
@@ -113,7 +113,7 @@
       .fields()
         .name("myint").type().intType().noDefault()
         .endRecord();
-    
+
     Assert.assertEquals("myrecord", s1.getName());
     Assert.assertEquals("myrecord", s2.getName());
     Assert.assertEquals("myrecord", s3.getName());
@@ -137,7 +137,7 @@
         .name("f0").type().stringType().noDefault()
         .endRecord();
   }
-  
+
   @Test
   public void testBoolean() {
     Schema.Type type = Schema.Type.BOOLEAN;
@@ -147,7 +147,7 @@
         .booleanBuilder().prop("p", "v").endBoolean();
     Assert.assertEquals(expected, built1);
   }
-  
+
   @Test
   public void testInt() {
     Schema.Type type = Schema.Type.INT;
@@ -157,7 +157,7 @@
         .intBuilder().prop("p", "v").endInt();
     Assert.assertEquals(expected, built1);
   }
-  
+
   @Test
   public void testLong() {
     Schema.Type type = Schema.Type.LONG;
@@ -167,7 +167,7 @@
         .longBuilder().prop("p", "v").endLong();
     Assert.assertEquals(expected, built1);
   }
-  
+
   @Test
   public void testFloat() {
     Schema.Type type = Schema.Type.FLOAT;
@@ -177,7 +177,7 @@
         .floatBuilder().prop("p", "v").endFloat();
     Assert.assertEquals(expected, built1);
   }
-  
+
   @Test
   public void testDuble() {
     Schema.Type type = Schema.Type.DOUBLE;
@@ -187,7 +187,7 @@
         .doubleBuilder().prop("p", "v").endDouble();
     Assert.assertEquals(expected, built1);
   }
-  
+
   @Test
   public void testString() {
     Schema.Type type = Schema.Type.STRING;
@@ -197,7 +197,7 @@
         .stringBuilder().prop("p", "v").endString();
     Assert.assertEquals(expected, built1);
   }
-  
+
   @Test
   public void testBytes() {
     Schema.Type type = Schema.Type.BYTES;
@@ -207,7 +207,7 @@
         .bytesBuilder().prop("p", "v").endBytes();
     Assert.assertEquals(expected, built1);
   }
-  
+
   @Test
   public void testNull() {
     Schema.Type type = Schema.Type.NULL;
@@ -218,7 +218,7 @@
     Assert.assertEquals(expected, built1);
   }
 
-  
+
   private Schema primitive(Schema.Type type, Schema bare) {
     // test creation of bare schema by name
     Schema bareByName = SchemaBuilder.builder().type(type.getName());
@@ -229,7 +229,7 @@
     p.addProp("p", "v");
     return p;
   }
-  
+
 
 //  @Test
 //  public void testError() {
@@ -328,16 +328,16 @@
     types.add(Schema.create(Schema.Type.LONG));
     types.add(Schema.create(Schema.Type.NULL));
     Schema expected = Schema.createUnion(types);
-    
+
     Schema schema = SchemaBuilder.unionOf()
         .longType().and()
         .nullType().endUnion();
     Assert.assertEquals(expected, schema);
-    
+
     schema = SchemaBuilder.nullable().longType();
     Assert.assertEquals(expected, schema);
   }
-  
+
   @Test
   public void testFields() {
     Schema rec = SchemaBuilder.record("Rec").fields()
@@ -353,7 +353,7 @@
     Assert.assertEquals(Order.IGNORE, rec.getField("ignored").order());
     Assert.assertTrue(rec.getField("aliased").aliases().contains("anAlias"));
   }
-  
+
   @Test
   public void testFieldShortcuts() {
     Schema full = SchemaBuilder.record("Blah").fields()
@@ -379,7 +379,7 @@
         .name("obytes").type().optional().bytesType()
         .name("nbytes").type().nullable().bytesType().bytesDefault(new byte[] {1,2,3})
         .endRecord();
-    
+
     Schema shortcut = SchemaBuilder.record("Blah").fields()
         .requiredBoolean("rbool")
         .optionalBoolean("obool")
@@ -403,10 +403,10 @@
         .optionalBytes("obytes")
         .nullableBytes("nbytes", new byte[] {1,2,3})
         .endRecord();
-    
+
     Assert.assertEquals(full, shortcut);
   }
-  
+
   @Test
   public void testNames() {
     // no contextual namespace
@@ -425,11 +425,11 @@
     checkField(r, expected, "f3");
     checkField(r, expected, "f4");
     checkField(r, expected, "f5");
-    
+
     // context namespace
     Schema f = SchemaBuilder.builder("").fixed("Foo").size(1);
     Assert.assertEquals(Schema.createFixed("Foo", null, null, 1), f);
-   
+
     // context namespace from record matches
     r = SchemaBuilder.record("Rec").namespace("org.foo").fields()
         .name("f0").type().fixed("MyFixed").size(1).noDefault()
@@ -465,7 +465,7 @@
     checkField(r, expected, "f3");
     checkField(r, expected, "f4");
     checkField(r, expected, "f5");
-    
+
     // context namespace from record, nested has no namespace
     expected = Schema.createFixed("MyFixed", null, null, 1);
     r = SchemaBuilder.record("Rec").namespace("org.rec").fields()
@@ -474,18 +474,18 @@
         .endRecord();
     checkField(r, expected, "f0");
     checkField(r, expected, "f1");
-    
+
     // mimic names of primitives, but with a namesapce.  This is OK
     SchemaBuilder.fixed("org.test.long").size(1);
     SchemaBuilder.fixed("long").namespace("org.test").size(1);
     SchemaBuilder.builder("org.test").fixed("long").size(1);
 
   }
-  
+
   private void checkField(Schema r, Schema expected, String name) {
     Assert.assertEquals(expected, r.getField(name).schema());
   }
-  
+
   @Test(expected=SchemaParseException.class)
   public void testNamesFailRedefined() {
     SchemaBuilder.record("Rec").fields()
@@ -498,12 +498,12 @@
   public void testNamesFailAbsent() {
     SchemaBuilder.builder().type("notdefined");
   }
-  
+
   @Test(expected=AvroTypeException.class)
   public void testNameReserved() {
     SchemaBuilder.fixed("long").namespace("").size(1);
   }
-  
+
   @Test
   public void testFieldTypesAndDefaultValues() {
     byte[] bytedef = new byte[]{3};
@@ -513,21 +513,21 @@
     mapdef.put("a", "A");
     ArrayList<String> arrdef = new ArrayList<String>();
     arrdef.add("arr");
-    
+
     Schema rec = SchemaBuilder.record("inner").fields()
       .name("f").type().intType().noDefault()
       .endRecord();
-    
+
     Schema rec2 = SchemaBuilder.record("inner2").fields()
       .name("f2").type().intType().noDefault()
       .endRecord();
-    
-    GenericData.Record recdef = 
+
+    GenericData.Record recdef =
         new GenericRecordBuilder(rec).set("f", 1).build();
-        
+
     GenericData.Record recdef2 =
         new GenericRecordBuilder(rec2).set("f2", 2).build();
-    
+
     Schema r = SchemaBuilder.record("r").fields()
       .name("boolF").type().booleanType().booleanDefault(false)
       .name("intF").type().intType().intDefault(1)
@@ -580,10 +580,10 @@
         .name("f2").type().intType().noDefault()
         .endRecord().and().intType().endUnion().recordDefault(recdef2)
       .endRecord();
-    
+
     GenericData.Record newRec =
         new GenericRecordBuilder(r).build();
-    
+
     Assert.assertEquals(false, newRec.get("boolF"));
     Assert.assertEquals(false, newRec.get("boolU"));
     Assert.assertEquals(1, newRec.get("intF"));
@@ -602,18 +602,18 @@
     Assert.assertEquals(bufdef, newRec.get("bytesU"));
     Assert.assertNull(newRec.get("nullF"));
     Assert.assertNull(newRec.get("nullU"));
-    Assert.assertArrayEquals(bytedef, 
+    Assert.assertArrayEquals(bytedef,
         ((GenericData.Fixed)newRec.get("fixedF1")).bytes());
-    Assert.assertArrayEquals(bytedef, 
+    Assert.assertArrayEquals(bytedef,
         ((GenericData.Fixed)newRec.get("fixedF2")).bytes());
-    Assert.assertArrayEquals(bytedef, 
+    Assert.assertArrayEquals(bytedef,
         ((GenericData.Fixed)newRec.get("fixedF3")).bytes());
-    Assert.assertArrayEquals(bytedef, 
+    Assert.assertArrayEquals(bytedef,
         ((GenericData.Fixed)newRec.get("fixedU")).bytes());
     Assert.assertEquals("S", newRec.get("enumF").toString());
     Assert.assertEquals("SS", newRec.get("enumU").toString());
     @SuppressWarnings("unchecked")
-    Map<CharSequence, CharSequence> map = 
+    Map<CharSequence, CharSequence> map =
       (Map<CharSequence, CharSequence>) newRec.get("mapF");
     Assert.assertEquals(mapdef.size(), map.size());
     for(Map.Entry<CharSequence, CharSequence> e : map.entrySet()) {
@@ -622,7 +622,7 @@
     }
     Assert.assertEquals(newRec.get("mapF"), newRec.get("mapU"));
     @SuppressWarnings("unchecked")
-    GenericData.Array<CharSequence> arr = 
+    GenericData.Array<CharSequence> arr =
       (GenericData.Array<CharSequence>) newRec.get("arrayF");
     Assert.assertEquals(arrdef.size(), arr.size());
     for(CharSequence c : arr) {
@@ -633,15 +633,15 @@
     Assert.assertEquals(recdef2, newRec.get("recordU"));
     Assert.assertEquals("S", newRec.get("byName").toString());
   }
-  
+
   @Test(expected=SchemaBuilderException.class)
   public void testBadDefault() {
     SchemaBuilder.record("r").fields()
       .name("f").type(Schema.create(Schema.Type.INT)).withDefault(new Object())
       .endRecord();
   }
-  
-  @Test 
+
+  @Test
   public void testUnionFieldBuild() {
     SchemaBuilder.record("r").fields()
       .name("allUnion").type().unionOf()
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaNormalization.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaNormalization.java
index f8c0413..cb8a6d7 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaNormalization.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaNormalization.java
@@ -111,7 +111,7 @@
                            s.getBytes("UTF-8"));
       return altExtend(SchemaNormalization.EMPTY64, 64, tmp, POSTFIX);
     } catch (java.io.UnsupportedEncodingException e)
-      { throw new RuntimeException(e); } 
+      { throw new RuntimeException(e); }
   }
 
   private static long altExtend(long poly, int degree, long fp, byte[] b) {
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TypeEnum.java b/lang/java/avro/src/test/java/org/apache/avro/TypeEnum.java
index 3cff7fe..de91bb3 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TypeEnum.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TypeEnum.java
@@ -1,6 +1,6 @@
 /**
  * Autogenerated by Avro
- * 
+ *
  * DO NOT EDIT DIRECTLY
  *
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -19,10 +19,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.avro;  
+package org.apache.avro;
 @SuppressWarnings("all")
 @org.apache.avro.specific.AvroGenerated
-public enum TypeEnum { 
+public enum TypeEnum {
   a, b, c  ;
   public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"enum\",\"name\":\"TypeEnum\",\"namespace\":\"org.apache.avro\",\"symbols\":[\"a\",\"b\",\"c\"]}");
   public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
diff --git a/lang/java/avro/src/test/java/org/apache/avro/data/RecordBuilderBaseTest.java b/lang/java/avro/src/test/java/org/apache/avro/data/RecordBuilderBaseTest.java
index c3f236a..0a47e73 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/data/RecordBuilderBaseTest.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/data/RecordBuilderBaseTest.java
@@ -34,57 +34,57 @@
 public class RecordBuilderBaseTest {
   private static Set<Type> primitives;
   private static Set<Type> nonNullPrimitives;
-  
+
   @BeforeClass()
   public static void setUpBeforeClass() {
     primitives = new HashSet<Type>(Arrays.asList(Type.values()));
-    primitives.removeAll(Arrays.asList(new Type[] { 
-        Type.RECORD, Type.ENUM, Type.ARRAY, Type.MAP, Type.UNION, Type.FIXED 
+    primitives.removeAll(Arrays.asList(new Type[] {
+        Type.RECORD, Type.ENUM, Type.ARRAY, Type.MAP, Type.UNION, Type.FIXED
     }));
-    
+
     nonNullPrimitives = new HashSet<Type>(primitives);
     nonNullPrimitives.remove(Type.NULL);
   }
-  
+
   @Test
-  public void testIsValidValueWithPrimitives() { 
+  public void testIsValidValueWithPrimitives() {
     // Verify that a non-null value is valid for all primitives:
     for (Type type : primitives) {
       Field f = new Field("f", Schema.create(type), null, null);
       Assert.assertTrue(RecordBuilderBase.isValidValue(f, new Object()));
     }
-    
+
     // Verify that null is not valid for all non-null primitives:
     for (Type type : nonNullPrimitives) {
       Field f = new Field("f", Schema.create(type), null, null);
       Assert.assertFalse(RecordBuilderBase.isValidValue(f, null));
     }
   }
-  
+
   @Test
   public void testIsValidValueWithNullField() {
     // Verify that null is a valid value for null fields:
     Assert.assertTrue(RecordBuilderBase.isValidValue(
         new Field("f", Schema.create(Type.NULL), null, null), null));
   }
-  
+
   @Test
   public void testIsValidValueWithUnion() {
     // Verify that null values are not valid for a union with no null type:
-    Schema unionWithoutNull = Schema.createUnion(Arrays.asList(new Schema[] { 
+    Schema unionWithoutNull = Schema.createUnion(Arrays.asList(new Schema[] {
         Schema.create(Type.STRING), Schema.create(Type.BOOLEAN)
     }));
-    
+
     Assert.assertTrue(RecordBuilderBase.isValidValue(
         new Field("f", unionWithoutNull, null, null), new Object()));
     Assert.assertFalse(RecordBuilderBase.isValidValue(
         new Field("f", unionWithoutNull, null, null), null));
-    
+
     // Verify that null values are valid for a union with a null type:
-    Schema unionWithNull = Schema.createUnion(Arrays.asList(new Schema[] { 
+    Schema unionWithNull = Schema.createUnion(Arrays.asList(new Schema[] {
         Schema.create(Type.STRING), Schema.create(Type.NULL)
     }));
-    
+
     Assert.assertTrue(RecordBuilderBase.isValidValue(
         new Field("f", unionWithNull, null, null), new Object()));
     Assert.assertTrue(RecordBuilderBase.isValidValue(
diff --git a/lang/java/avro/src/test/java/org/apache/avro/file/TestBZip2Codec.java b/lang/java/avro/src/test/java/org/apache/avro/file/TestBZip2Codec.java
index febcaef..2a9cad2 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/file/TestBZip2Codec.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/file/TestBZip2Codec.java
@@ -24,39 +24,39 @@
 import static org.junit.Assert.assertTrue;
 
 public class TestBZip2Codec {
-  
+
   @Test
   public void testBZip2CompressionAndDecompression() throws IOException {
     Codec codec = CodecFactory.fromString("bzip2").createInstance();
     assertTrue(codec instanceof BZip2Codec);
     assertTrue(codec.getName().equals("bzip2"));
-    
+
     //This is 3 times the byte buffer on the BZip2 decompress plus some extra
     final int inputByteSize = BZip2Codec.DEFAULT_BUFFER_SIZE * 3 + 42;
-    
+
     byte[] inputByteArray = new byte[inputByteSize];
-    
+
     //Generate something that will compress well
     for (int i = 0; i < inputByteSize; i++) {
       inputByteArray[i] = (byte)(65 + i % 10);
     }
-    
+
     ByteBuffer inputByteBuffer = ByteBuffer.allocate(inputByteSize * 2);
     inputByteBuffer.put(inputByteArray);
-    
+
     ByteBuffer compressedBuffer = codec.compress(inputByteBuffer);
-    
+
     //Make sure something returned
     assertTrue(compressedBuffer.array().length > 0);
     //Make sure the compressed output is smaller then the original
     assertTrue(compressedBuffer.array().length < inputByteArray.length);
-    
+
     ByteBuffer decompressedBuffer = codec.decompress(compressedBuffer);
-    
+
     //The original array should be the same length as the decompressed array
     assertTrue(decompressedBuffer.array().length == inputByteArray.length);
-    
-    //Every byte in the outputByteArray should equal every byte in the input array 
+
+    //Every byte in the outputByteArray should equal every byte in the input array
     byte[] outputByteArray = decompressedBuffer.array();
     for (int i = 0; i < inputByteSize; i++) {
       inputByteArray[i] = outputByteArray[i];
diff --git a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java
index b8b59e2..7b33971 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java
@@ -47,12 +47,12 @@
 import org.junit.Test;
 
 public class TestGenericData {
-  
+
   @Test(expected=AvroRuntimeException.class)
     public void testrecordConstructorNullSchema() throws Exception {
     new GenericData.Record(null);
   }
-    
+
   @Test(expected=AvroRuntimeException.class)
     public void testrecordConstructorWrongSchema() throws Exception {
     new GenericData.Record(Schema.create(Schema.Type.INT));
@@ -62,7 +62,7 @@
     public void testArrayConstructorNullSchema() throws Exception {
     new GenericData.Array<Object>(1, null);
   }
-    
+
   @Test(expected=AvroRuntimeException.class)
     public void testArrayConstructorWrongSchema() throws Exception {
     new GenericData.Array<Object>(1, Schema.create(Schema.Type.INT));
@@ -95,7 +95,7 @@
     Record r = new GenericData.Record(s);
     r.put("invalidFieldName", "someValue");
   }
-  
+
   @Test
   /** Make sure that even with nulls, hashCode() doesn't throw NPE. */
   public void testHashCode() {
@@ -109,7 +109,7 @@
     r.put(0, stuff);
     GenericData.get().hashCode(r, schema);
   }
-  
+
   @Test
   public void testEquals() {
     Schema s = recordSchema();
@@ -118,7 +118,7 @@
     GenericRecord r2 = new GenericData.Record(s);
     Collection<CharSequence> l0 = new ArrayDeque<CharSequence>();
     List<CharSequence> l1 = new ArrayList<CharSequence>();
-    GenericArray<CharSequence> l2 = 
+    GenericArray<CharSequence> l2 =
       new GenericData.Array<CharSequence>(1,s.getFields().get(0).schema());
     String foo = "foo";
     l0.add(new StringBuffer(foo));
@@ -131,13 +131,13 @@
     assertEquals(r0, r2);
     assertEquals(r1, r2);
   }
-  
+
   private Schema recordSchema() {
     List<Field> fields = new ArrayList<Field>();
     fields.add(new Field("anArray", Schema.createArray(Schema.create(Type.STRING)), null, null));
     Schema schema = Schema.createRecord("arrayFoo", "test", "mytest", false);
     schema.setFields(fields);
-    
+
     return schema;
   }
 
@@ -172,7 +172,7 @@
     GenericData.Record record = new GenericData.Record(schema);
     assertNull(record.get("does not exist"));
   }
-  
+
   @Test
   public void testArrayReversal() {
       Schema schema = Schema.createArray(Schema.create(Schema.Type.INT));
@@ -236,8 +236,8 @@
     assertEquals(new Integer(6), array.get(0));
     assertEquals(8, array.size());
     try {
-	array.get(9);
-	fail("Expected IndexOutOfBoundsException after adding elements");
+      array.get(9);
+      fail("Expected IndexOutOfBoundsException after adding elements");
     } catch (IndexOutOfBoundsException e){}
   }
   @Test
@@ -297,24 +297,24 @@
     assertEquals(10, array.size());
     assertEquals(new Integer(55), array.get(5));
   }
-  
+
   @Test
   public void testToStringIsJson() throws JsonParseException, IOException {
     Field stringField = new Field("string", Schema.create(Type.STRING), null, null);
     Field enumField = new Field("enum", Schema.createEnum("my_enum", "doc", null, Arrays.asList("a", "b", "c")), null, null);
     Schema schema = Schema.createRecord("my_record", "doc", "mytest", false);
     schema.setFields(Arrays.asList(stringField, enumField));
-    
+
     GenericRecord r = new GenericData.Record(schema);
     // \u2013 is EN DASH
     r.put(stringField.name(), "hello\nthere\"\tyou\u2013}");
     r.put(enumField.name(), new GenericData.EnumSymbol(enumField.schema(),"a"));
-    
+
     String json = r.toString();
     JsonFactory factory = new JsonFactory();
     JsonParser parser = factory.createJsonParser(json);
     ObjectMapper mapper = new ObjectMapper();
-    
+
     // will throw exception if string is not parsable json
     mapper.readTree(parser);
   }
@@ -356,7 +356,7 @@
     fields.add(integerField);
     Schema record = Schema.createRecord("test", null, null, false);
     record.setFields(fields);
-    
+
     ByteArrayOutputStream b1 = new ByteArrayOutputStream(5);
     ByteArrayOutputStream b2 = new ByteArrayOutputStream(5);
     BinaryEncoder b1Enc = EncoderFactory.get().binaryEncoder(b1, null);
@@ -389,7 +389,7 @@
       fail("IOException while writing records to output stream.");
     }
   }
-  
+
   @Test
   public void testEnumCompare() {
     Schema s = Schema.createEnum("Kind",null,null,Arrays.asList("Z","Y","X"));
@@ -410,10 +410,10 @@
     Field byte_field =
       new Field("bytes", Schema.create(Type.BYTES), null, null);
     schema.setFields(Arrays.asList(byte_field));
-    
+
     GenericRecord record = new GenericData.Record(schema);
     record.put(byte_field.name(), buffer);
-    
+
     GenericRecord copy = GenericData.get().deepCopy(schema, record);
     ByteBuffer buffer_copy = (ByteBuffer) copy.get(byte_field.name());
 
@@ -461,7 +461,7 @@
   public void validateRequiresGenericSymbolForEnumSchema() {
     final Schema schema = Schema.createEnum("my_enum", "doc", "namespace", Arrays.asList("ONE","TWO","THREE"));
     final GenericData gd = GenericData.get();
-    
+
     /* positive cases */
     assertTrue(gd.validate(schema, new GenericData.EnumSymbol(schema, "ONE")));
     assertTrue(gd.validate(schema, new GenericData.EnumSymbol(schema, anEnum.ONE)));
diff --git a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericDatumWriter.java b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericDatumWriter.java
index ed52874..f93a326 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericDatumWriter.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericDatumWriter.java
@@ -61,7 +61,7 @@
     Encoder e = EncoderFactory.get().jsonEncoder(s, bao);
     w.write(r, e);
     e.flush();
-    
+
     Object o = new GenericDatumReader<GenericRecord>(s).read(null,
         DecoderFactory.get().jsonDecoder(s, new ByteArrayInputStream(bao.toByteArray())));
     assertEquals(r, o);
@@ -81,7 +81,7 @@
 
     final TestEncoder e = new TestEncoder(EncoderFactory.get()
         .directBinaryEncoder(bao, null), sizeWrittenSignal, eltAddedSignal);
-    
+
     // call write in another thread
     ExecutorService executor = Executors.newSingleThreadExecutor();
     Future<Void> result = executor.submit(new Callable<Void>() {
@@ -103,7 +103,7 @@
       assertTrue(ex.getCause() instanceof ConcurrentModificationException);
     }
   }
-  
+
 
   @Test
   public void testMapConcurrentModification() throws Exception {
@@ -119,7 +119,7 @@
 
     final TestEncoder e = new TestEncoder(EncoderFactory.get()
         .directBinaryEncoder(bao, null), sizeWrittenSignal, eltAddedSignal);
-    
+
     // call write in another thread
     ExecutorService executor = Executors.newSingleThreadExecutor();
     Future<Void> result = executor.submit(new Callable<Void>() {
@@ -141,20 +141,20 @@
       assertTrue(ex.getCause() instanceof ConcurrentModificationException);
     }
   }
-  
+
   static class TestEncoder extends Encoder {
-    
+
     Encoder e;
     CountDownLatch sizeWrittenSignal;
     CountDownLatch eltAddedSignal;
-    
+
     TestEncoder(Encoder encoder, CountDownLatch sizeWrittenSignal,
         CountDownLatch eltAddedSignal) {
       this.e = encoder;
       this.sizeWrittenSignal = sizeWrittenSignal;
       this.eltAddedSignal = eltAddedSignal;
     }
-    
+
     @Override
     public void writeArrayStart() throws IOException {
       e.writeArrayStart();
@@ -176,7 +176,7 @@
         // ignore
       }
     }
-    
+
     @Override
     public void flush() throws IOException { e.flush(); }
     @Override
diff --git a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericRecordBuilder.java b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericRecordBuilder.java
index 8f7dee5..cc641f7 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericRecordBuilder.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericRecordBuilder.java
@@ -37,15 +37,15 @@
   public void testGenericBuilder() {
     Schema schema = recordSchema();
     GenericRecordBuilder builder = new GenericRecordBuilder(schema);
-    
+
     // Verify that builder has no fields set after initialization:
     for (Field field : schema.getFields()) {
-      Assert.assertFalse("RecordBuilder should not have field " + field.name(), 
+      Assert.assertFalse("RecordBuilder should not have field " + field.name(),
           builder.has(field.name()));
       Assert.assertNull("Field " + field.name() + " should be null",
           builder.get(field.name()));
     }
-    
+
     // Set field in builder:
     builder.set("intField", 1);
     List<String> anArray = Arrays.asList(new String[] { "one", "two", "three" });
@@ -54,47 +54,47 @@
     Assert.assertEquals(anArray, builder.get("anArray"));
     Assert.assertFalse("id should not be set", builder.has("id"));
     Assert.assertNull(builder.get("id"));
-    
+
     // Build the record, and verify that fields are set:
     Record record = builder.build();
     Assert.assertEquals(new Integer(1), record.get("intField"));
     Assert.assertEquals(anArray, record.get("anArray"));
     Assert.assertNotNull(record.get("id"));
     Assert.assertEquals("0", record.get("id").toString());
-    
+
     // Test copy constructors:
     Assert.assertEquals(builder, new GenericRecordBuilder(builder));
     Assert.assertEquals(record, new GenericRecordBuilder(record).build());
-    
+
     // Test clear:
     builder.clear("intField");
     Assert.assertFalse(builder.has("intField"));
     Assert.assertNull(builder.get("intField"));
   }
-  
+
   @Test(expected=org.apache.avro.AvroRuntimeException.class)
   public void attemptToSetNonNullableFieldToNull() {
     new GenericRecordBuilder(recordSchema()).set("intField", null);
   }
-  
+
   @Test(expected=org.apache.avro.AvroRuntimeException.class)
   public void buildWithoutSettingRequiredFields1() {
     new GenericRecordBuilder(recordSchema()).build();
   }
-  
+
   @Test()
   public void buildWithoutSettingRequiredFields2() {
     try {
       new GenericRecordBuilder(recordSchema()).
       set("anArray", Arrays.asList(new String[] { "one" })).
       build();
-      Assert.fail("Should have thrown " + 
+      Assert.fail("Should have thrown " +
           AvroRuntimeException.class.getCanonicalName());
     } catch (AvroRuntimeException e) {
       Assert.assertTrue(e.getMessage().contains("intField"));
     }
   }
-  
+
   /** Creates a test record schema */
   private static Schema recordSchema() {
     List<Field> fields = new ArrayList<Field>();
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/LegacyBinaryEncoder.java b/lang/java/avro/src/test/java/org/apache/avro/io/LegacyBinaryEncoder.java
index ea8d778..e8b1b0a 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/LegacyBinaryEncoder.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/LegacyBinaryEncoder.java
@@ -43,11 +43,11 @@
  */
 public class LegacyBinaryEncoder extends Encoder {
   protected OutputStream out;
-  
+
   private interface ByteWriter {
     void write(ByteBuffer bytes) throws IOException;
   }
-  
+
   private static final class SimpleByteWriter implements ByteWriter {
     private final OutputStream out;
 
@@ -61,7 +61,7 @@
       out.write(bytes.array(), bytes.position(), bytes.remaining());
     }
   }
-  
+
   private final ByteWriter byteWriter;
 
   /** Create a writer that sends its output to the underlying stream
@@ -80,7 +80,7 @@
 
   @Override
   public void writeNull() throws IOException { }
-  
+
   @Override
   public void writeBoolean(boolean b) throws IOException {
     out.write(b ? 1 : 0);
@@ -95,7 +95,7 @@
   public void writeLong(long n) throws IOException {
     encodeLong(n, out);
   }
-  
+
   @Override
   public void writeFloat(float f) throws IOException {
     encodeFloat(f, out);
@@ -110,29 +110,29 @@
   public void writeString(Utf8 utf8) throws IOException {
     encodeString(utf8.getBytes(), 0, utf8.getByteLength());
   }
-  
+
   @Override
   public void writeString(String string) throws IOException {
     byte[] bytes = Utf8.getBytesFor(string);
     encodeString(bytes, 0, bytes.length);
   }
-  
+
   private void encodeString(byte[] bytes, int offset, int length) throws IOException {
     encodeLong(length, out);
     out.write(bytes, offset, length);
   }
-  
+
   @Override
   public void writeBytes(ByteBuffer bytes) throws IOException {
     byteWriter.write(bytes);
   }
-  
+
   @Override
   public void writeBytes(byte[] bytes, int start, int len) throws IOException {
     encodeLong(len, out);
     out.write(bytes, start, len);
   }
-  
+
   @Override
   public void writeFixed(byte[] bytes, int start, int len) throws IOException {
     out.write(bytes, start, len);
@@ -153,7 +153,7 @@
       writeLong(itemCount);
     }
   }
-  
+
   @Override
   public void startItem() throws IOException {
   }
@@ -176,7 +176,7 @@
   public void writeIndex(int unionIndex) throws IOException {
     encodeLong(unionIndex, out);
   }
-  
+
   protected static void encodeLong(long n, OutputStream o) throws IOException {
     n = (n << 1) ^ (n >> 63); // move sign to low-order bit
     while ((n & ~0x7F) != 0) {
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryDecoder.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryDecoder.java
index aa3e1d7..572be60 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryDecoder.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryDecoder.java
@@ -52,7 +52,7 @@
   public TestBinaryDecoder(boolean useDirect) {
     this.useDirect = useDirect;
   }
-  
+
   @Parameters
   public static Collection<Object[]> data() {
     return Arrays.asList(new Object[][] {
@@ -60,7 +60,7 @@
         { false },
     });
   }
-  
+
   private Decoder newDecoderWithNoData() throws IOException {
     return newDecoder(new byte[0]);
   }
@@ -68,7 +68,7 @@
   private Decoder newDecoder(byte[] bytes, int start, int len)
     throws IOException {
     return factory.binaryDecoder(bytes, start, len, null);
-    
+
   }
 
   private Decoder newDecoder(InputStream in) {
@@ -89,37 +89,37 @@
   public void testEOFBoolean() throws IOException {
     newDecoderWithNoData().readBoolean();
   }
-  
+
   @Test(expected=EOFException.class)
   public void testEOFInt() throws IOException {
     newDecoderWithNoData().readInt();
   }
-  
+
   @Test(expected=EOFException.class)
   public void testEOFLong() throws IOException {
     newDecoderWithNoData().readLong();
   }
-  
+
   @Test(expected=EOFException.class)
   public void testEOFFloat() throws IOException {
     newDecoderWithNoData().readFloat();
   }
-  
+
   @Test(expected=EOFException.class)
   public void testEOFDouble() throws IOException {
     newDecoderWithNoData().readDouble();
   }
-  
+
   @Test(expected=EOFException.class)
   public void testEOFBytes() throws IOException {
     newDecoderWithNoData().readBytes(null);
   }
-  
+
   @Test(expected=EOFException.class)
   public void testEOFString() throws IOException {
     newDecoderWithNoData().readString(new Utf8("a"));
   }
-  
+
   @Test(expected=EOFException.class)
   public void testEOFFixed() throws IOException {
     newDecoderWithNoData().readFixed(new byte[1]);
@@ -129,32 +129,32 @@
   public void testEOFEnum() throws IOException {
     newDecoderWithNoData().readEnum();
   }
-  
+
   @Test
   public void testReuse() throws IOException {
     ByteBufferOutputStream bbo1 = new ByteBufferOutputStream();
     ByteBufferOutputStream bbo2 = new ByteBufferOutputStream();
     byte[] b1 = new byte[] { 1, 2 };
-    
+
     BinaryEncoder e1 = e_factory.binaryEncoder(bbo1, null);
     e1.writeBytes(b1);
     e1.flush();
-    
+
     BinaryEncoder e2 = e_factory.binaryEncoder(bbo2, null);
     e2.writeBytes(b1);
     e2.flush();
-    
+
     DirectBinaryDecoder d = new DirectBinaryDecoder(
         new ByteBufferInputStream(bbo1.getBufferList()));
     ByteBuffer bb1 = d.readBytes(null);
     Assert.assertEquals(b1.length, bb1.limit() - bb1.position());
-    
+
     d.configure(new ByteBufferInputStream(bbo2.getBufferList()));
     ByteBuffer bb2 = d.readBytes(null);
     Assert.assertEquals(b1.length, bb2.limit() - bb2.position());
-    
+
   }
-  
+
   private static byte[] data = null;
   private static int seed = -1;
   private static Schema schema = null;
@@ -180,7 +180,7 @@
     writer.setSchema(schema);
     ByteArrayOutputStream baos = new ByteArrayOutputStream(8192);
     BinaryEncoder encoder = e_factory.binaryEncoder(baos, null);
-    
+
     for (Object datum : new RandomData(schema, count, seed)) {
       writer.write(datum, encoder);
       records.add(datum);
@@ -193,14 +193,14 @@
   public void testDecodeFromSources() throws IOException {
     GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
     reader.setSchema(schema);
-    
+
     ByteArrayInputStream is = new ByteArrayInputStream(data);
     ByteArrayInputStream is2 = new ByteArrayInputStream(data);
     ByteArrayInputStream is3 = new ByteArrayInputStream(data);
 
     Decoder fromInputStream = newDecoder(is);
     Decoder fromArray = newDecoder(data);
-    
+
     byte[] data2 = new byte[data.length + 30];
     Arrays.fill(data2, (byte)0xff);
     System.arraycopy(data, 0, data2, 15, data.length);
@@ -213,7 +213,7 @@
     BinaryDecoder initOnArray = factory.binaryDecoder(is3, null);
     initOnArray = factory.binaryDecoder(
         data, 0, data.length, initOnArray);
-    
+
     for (Object datum : records) {
       Assert.assertEquals(
           "InputStream based BinaryDecoder result does not match",
@@ -272,7 +272,7 @@
       Assert.assertFalse(bad.read() == check2.read());
     }
   }
-  
+
   @Test
   public void testInputStreamPartiallyUsed() throws IOException {
     BinaryDecoder bd = factory.binaryDecoder(
@@ -281,7 +281,7 @@
     InputStream check = new ByteArrayInputStream(data);
     // triggers buffer fill if unused and tests isEnd()
     try {
-      Assert.assertFalse(bd.isEnd()); 
+      Assert.assertFalse(bd.isEnd());
     } catch (UnsupportedOperationException e) {
       // this is ok if its a DirectBinaryDecoder.
       if (bd.getClass() != DirectBinaryDecoder.class) {
@@ -296,7 +296,7 @@
   private void validateInputStreamReads(InputStream test, InputStream check)
       throws IOException {
     byte[] bt = new byte[7];
-    byte[] bc = new byte[7]; 
+    byte[] bc = new byte[7];
     while (true) {
       int t = test.read();
       int c = check.read();
@@ -318,7 +318,7 @@
     Assert.assertFalse(test.getClass() != ByteArrayInputStream.class && test.markSupported());
     test.close();
   }
-  
+
   private void validateInputStreamSkips(InputStream test, InputStream check) throws IOException {
     while(true) {
       long t2 = test.skip(19);
@@ -383,7 +383,7 @@
     Arrays.fill(badint, (byte)0xff);
     newDecoder(badint).readLong();
   }
-  
+
   @Test(expected=EOFException.class)
   public void testFloatTooShort() throws IOException {
     byte[] badint = new byte[3];
@@ -435,7 +435,7 @@
       bd.skipFixed(8);
       long leftover = bd.skipArray();
       // booleans are one byte, array trailer is one byte
-      bd.skipFixed((int)leftover + 1); 
+      bd.skipFixed((int)leftover + 1);
       bd.skipFixed(0);
       bd.readLong();
     }
@@ -447,14 +447,14 @@
     }
     Assert.assertTrue(null != eof);
   }
-  
+
   @Test(expected = EOFException.class)
   public void testEOF() throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     Encoder e = EncoderFactory.get().binaryEncoder(baos, null);
     e.writeLong(0x10000000000000l);
     e.flush();
-      
+
     Decoder d = newDecoder(new ByteArrayInputStream(baos.toByteArray()));
     Assert.assertEquals(0x10000000000000l, d.readLong());
     d.readInt();
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryEncoderFidelity.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryEncoderFidelity.java
index 997ab94..505798a 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryEncoderFidelity.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryEncoderFidelity.java
@@ -28,7 +28,7 @@
 import org.junit.Test;
 
 public class TestBinaryEncoderFidelity {
-  
+
   static byte[] legacydata;
   static byte[] complexdata;
   EncoderFactory factory = EncoderFactory.get();
@@ -116,7 +116,7 @@
     }
     e.flush();
   }
-  
+
   static void generateComplexData(Encoder e) throws IOException {
     e.writeArrayStart();
     e.setItemCount(1);
@@ -136,7 +136,7 @@
     e.writeMapEnd();
     e.flush();
   }
-  
+
   @BeforeClass
   public static void generateLegacyData() throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -147,7 +147,7 @@
     generateComplexData(e);
     complexdata = baos.toByteArray();
   }
-  
+
   @Test
   public void testBinaryEncoder() throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -162,7 +162,7 @@
     Assert.assertEquals(complexdata.length, result2.length);
     Assert.assertArrayEquals(complexdata, result2);
   }
-  
+
   @Test
   public void testDirectBinaryEncoder() throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -178,7 +178,7 @@
     Assert.assertArrayEquals(complexdata, result2);
   }
 
-  
+
   @Test
   public void testBlockingBinaryEncoder() throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java
index 95729fe..f6cb76b 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java
@@ -47,33 +47,33 @@
     this.iDepth = dp;
     this.sInput = inp;
   }
-  
+
   private static class Tests {
     private final JsonParser parser;
     private final Decoder input;
     private final int depth;
     public Tests(int bufferSize, int depth, String input)
       throws IOException {
-  
+
       this.depth = depth;
       byte[] in = input.getBytes("UTF-8");
       JsonFactory f = new JsonFactory();
       JsonParser p = f.createJsonParser(
           new ByteArrayInputStream(input.getBytes("UTF-8")));
-      
+
       ByteArrayOutputStream os = new ByteArrayOutputStream();
       EncoderFactory factory = new EncoderFactory()
           .configureBlockSize(bufferSize);
       Encoder cos = factory.blockingBinaryEncoder(os, null);
       serialize(cos, p, os);
       cos.flush();
-      
+
       byte[] bb = os.toByteArray();
       // dump(bb);
       this.input = DecoderFactory.get().binaryDecoder(bb, null);
       this.parser =  f.createJsonParser(new ByteArrayInputStream(in));
     }
-    
+
     public void scan() throws IOException {
       Stack<S> countStack = new Stack<S>();
       long count = 0;
@@ -208,7 +208,7 @@
   private static class S {
     public final long count;
     public final boolean isArray;
-    
+
     public S(long count, boolean isArray) {
       this.count = count;
       this.isArray = isArray;
@@ -270,7 +270,7 @@
     }
     parser.skipChildren();
   }
- 
+
   private static void checkString(String s, Decoder input, int n)
     throws IOException {
     ByteBuffer buf = input.readBytes(null);
@@ -279,14 +279,14 @@
         buf.remaining(), UTF_8);
     assertEquals(s, s2);
   }
-  
+
   private static void serialize(Encoder cos, JsonParser p,
       ByteArrayOutputStream os)
     throws IOException {
     boolean[] isArray = new boolean[100];
     int[] counts = new int[100];
     int stackTop = -1;
-    
+
     while (p.nextToken() != null) {
       switch (p.getCurrentToken()) {
       case END_ARRAY:
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO2.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO2.java
index 6438a60..5bb6c84 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO2.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO2.java
@@ -38,7 +38,7 @@
   private final Decoder decoder;
   private final String calls;
   private Object[] values;
-  
+
   public TestBlockingIO2 (int bufferSize, int skipLevel, String calls)
     throws IOException {
 
@@ -50,13 +50,13 @@
 
     TestValidatingIO.generate(encoder, calls, values);
     encoder.flush();
-    
+
     byte[] bb = os.toByteArray();
-    
+
     decoder = DecoderFactory.get().binaryDecoder(bb, null);
     this.calls = calls;
   }
-    
+
   @Test
   public void testScan() throws IOException {
     TestValidatingIO.check(decoder, calls, values, -1);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
index 46a9025..4d16f16 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
@@ -41,7 +41,7 @@
     BinaryEncoder enc = factory.binaryEncoder(out, null);
     Assert.assertTrue(enc == factory.binaryEncoder(out, enc));
   }
-  
+
   @Test(expected=NullPointerException.class)
   public void testBadBinaryEncoderInit() {
     factory.binaryEncoder(null, null);
@@ -53,21 +53,21 @@
     BinaryEncoder reuse = null;
     reuse = factory.blockingBinaryEncoder(out, reuse);
     Assert.assertTrue(reuse == factory.blockingBinaryEncoder(out, reuse));
-    // comparison 
+    // comparison
   }
-  
+
   @Test(expected=NullPointerException.class)
   public void testBadBlockintBinaryEncoderInit() {
     factory.binaryEncoder(null, null);
   }
-  
+
   @Test
   public void testDirectBinaryEncoderInit() throws IOException {
     OutputStream out = new ByteArrayOutputStream();
     BinaryEncoder enc = factory.directBinaryEncoder(out, null);
     Assert.assertTrue(enc ==  factory.directBinaryEncoder(out, enc));
   }
-  
+
   @Test(expected=NullPointerException.class)
   public void testBadDirectBinaryEncoderInit() {
     factory.directBinaryEncoder(null, null);
@@ -82,12 +82,12 @@
         new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8));
     enc.configure(out);
   }
-  
+
   @Test(expected=NullPointerException.class)
   public void testBadJsonEncoderInitOS() throws IOException {
     factory.jsonEncoder(Schema.create(Type.INT), (OutputStream)null);
   }
-  
+
   @Test(expected=NullPointerException.class)
   public void testBadJsonEncoderInit() throws IOException {
     factory.jsonEncoder(Schema.create(Type.INT), (JsonGenerator)null);
@@ -119,7 +119,7 @@
     String value = "{\"b\": 2, \"a\": 1}";
     Schema schema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [" +
         "{\"name\": \"a\", \"type\": \"int\"}, {\"name\": \"b\", \"type\": \"int\"}" +
-    		"]}");
+        "]}");
     GenericDatumReader<Object> reader = new GenericDatumReader<Object>(schema);
     Decoder decoder = DecoderFactory.get().jsonDecoder(schema, value);
     Object o = reader.read(null, decoder);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestJsonDecoder.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestJsonDecoder.java
index 7946beb..4ac07eb 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestJsonDecoder.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestJsonDecoder.java
@@ -26,7 +26,7 @@
 import org.junit.Assert;
 
 public class TestJsonDecoder {
-  
+
   @Test public void testInt() throws Exception {
     checkNumeric("int", 1);
   }
@@ -44,7 +44,7 @@
   }
 
   private void checkNumeric(String type, Object value) throws Exception {
-    String def = 
+    String def =
       "{\"type\":\"record\",\"name\":\"X\",\"fields\":"
       +"[{\"type\":\""+type+"\",\"name\":\"n\"}]}";
     Schema schema = Schema.parse(def);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIO.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIO.java
index d5f1b06..3cdc7e5 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIO.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIO.java
@@ -51,7 +51,7 @@
     this.sJsRdrSchm = jsonReaderSchema;
     this.sRdrCls = readerCalls;
   }
-  
+
   @Test
   public void testIdentical() throws IOException {
     performTest(eEnc, iSkipL, sJsWrtSchm, sWrtCls, sJsWrtSchm, sWrtCls);
@@ -66,7 +66,7 @@
 
   private void performTest(Encoding encoding,
       int skipLevel, String jsonWriterSchema,
-      String writerCalls, 
+      String writerCalls,
       String jsonReaderSchema, String readerCalls)
   throws IOException {
     for (int i = 0; i < COUNT; i++) {
@@ -74,7 +74,7 @@
           jsonReaderSchema, readerCalls, encoding, skipLevel);
     }
   }
-  
+
   private void testOnce(String jsonWriterSchema,
       String writerCalls,
       String jsonReaderSchema,
@@ -83,7 +83,7 @@
       int skipLevel) throws IOException {
     Object[] values = TestValidatingIO.randomValues(writerCalls);
     Object[] expected = TestValidatingIO.randomValues(readerCalls);
-    
+
     Schema writerSchema = new Schema.Parser().parse(jsonWriterSchema);
     byte[] bytes = TestValidatingIO.make(writerSchema, writerCalls,
         values, encoding);
@@ -114,7 +114,7 @@
     Decoder vi = new ResolvingDecoder(wsc, rsc, bvi);
     TestValidatingIO.check(vi, calls, values, skipLevel);
   }
-  
+
   @Parameterized.Parameters
   public static Collection<Object[]> data2() {
     return Arrays.asList(TestValidatingIO.convertTo2dArray(encodings, skipLevels, testSchemas()));
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIOResolving.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIOResolving.java
index e6377b5..b722e76 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIOResolving.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIOResolving.java
@@ -190,8 +190,8 @@
           + "{\"name\":\"f3\", \"type\":\"double\"}]}", "BLD",
           new Object[] { true, 100L, 10.75d } },
         // Array of record with arrays.
-        { "{ \"type\": \"array\", \"items\":" +
-        		"{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        { "{ \"type\": \"array\", \"items\":" + 
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
             + "{\"name\":\"f0\", \"type\":\"boolean\"},"
             + "{\"name\":\"f1\", \"type\": {\"type\":\"array\", \"items\": \"boolean\" }}"
             + "]}}", "[c2sB[c2sBsB]sB[c3sBsBsB]]",
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestValidatingIO.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestValidatingIO.java
index bfec06d..792a987 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestValidatingIO.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestValidatingIO.java
@@ -48,10 +48,10 @@
     BLOCKING_BINARY,
     JSON,
   }
-  
+
   private static final Logger LOG =
       LoggerFactory.getLogger(TestValidatingIO.class);
-  
+
   private Encoding eEnc;
   private int iSkipL;
   private String sJsSch;
@@ -64,7 +64,7 @@
     this.sCl = cls;
   }
   private static final int COUNT = 1;
-  
+
   @Test
   public void testMain() throws IOException {
     for (int i = 0; i < COUNT; i++) {
@@ -98,7 +98,7 @@
       bvo = factory.jsonEncoder(sc, ba);
       break;
     }
-        
+
     Encoder vo = factory.validatingEncoder(sc, bvo);
     generate(vo, calls, values);
     vo.flush();
@@ -108,22 +108,22 @@
   public static class InputScanner {
     private final char[] chars;
     private int cpos = 0;
-    
+
     public InputScanner(char[] chars) {
       this.chars = chars;
     }
-    
+
     public boolean next() {
       if (cpos < chars.length) {
         cpos++;
       }
       return cpos != chars.length;
     }
-    
+
     public char cur() {
       return chars[cpos];
     }
-    
+
     public boolean isDone() {
       return cpos == chars.length;
     }
@@ -319,7 +319,7 @@
     Decoder vi = new ValidatingDecoder(sc, bvi);
     check(vi, calls, values, skipLevel);
   }
-  
+
   public static void check(Decoder vi, String calls,
       Object[] values, final int skipLevel) throws IOException {
     InputScanner cs = new InputScanner(calls.toCharArray());
@@ -523,16 +523,16 @@
   public static Collection<Object[]> data() {
     return Arrays.asList(convertTo2dArray(encodings, skipLevels, testSchemas()));
   }
-  
+
   private static Object[][] encodings = new Object[][] {
       { Encoding.BINARY }, { Encoding.BLOCKING_BINARY },
       { Encoding.JSON }
-    }; 
+    };
 
   private static Object[][] skipLevels = new Object[][] {
       { -1 }, { 0 }, { 1 }, { 2 },
   };
-  
+
   public static Object[][] convertTo2dArray(final Object[][]... values) {
     ArrayList<Object[]> ret = new ArrayList<Object[]>();
 
@@ -582,7 +582,7 @@
       }
     };
   }
-  
+
   /**
    * Concatenates the input sequences in order and forms a longer sequence.
    */
@@ -730,7 +730,7 @@
           + "{\"name\":\"f6\", \"type\":\"string\"},"
           + "{\"name\":\"f7\", \"type\":\"bytes\"}]}",
             "NBILFDS10b25" },
-        
+
         // record of records
         { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
           + "{\"name\":\"f1\", \"type\":{\"type\":\"record\", "
@@ -796,14 +796,14 @@
 
         { "[\"boolean\", {\"type\":\"array\", \"items\":\"int\"} ]",
             "U1[c1sI]" },
-          
+
         // Recursion
         { "{\"type\": \"record\", \"name\": \"Node\", \"fields\": ["
           + "{\"name\":\"label\", \"type\":\"string\"},"
           + "{\"name\":\"children\", \"type\":"
           + "{\"type\": \"array\", \"items\": \"Node\" }}]}",
           "S10[c1sS10[]]" },
-          
+
         { "{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
           + "{\"name\":\"value\", \"type\":[\"null\", \"string\","
           + "{\"type\": \"record\", \"name\": \"Cons\", \"fields\": ["
@@ -822,16 +822,16 @@
           + "{\"name\":\"car\", \"type\":\"Lisp\"},"
           + "{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}",
           "U2U1S10U0N"},
-          
+
         // Deep recursion
         { "{\"type\": \"record\", \"name\": \"Node\", \"fields\": ["
           + "{\"name\":\"children\", \"type\":"
           + "{\"type\": \"array\", \"items\": \"Node\" }}]}",
           "[c1s[c1s[c1s[c1s[c1s[c1s[c1s[c1s[c1s[c1s[c1s[]]]]]]]]]]]]" },
-              
+
     };
   }
-  
+
   static void dump(byte[] bb) {
     int col = 0;
     for (byte b : bb) {
@@ -844,17 +844,17 @@
     System.out.println();
   }
 
-  static void print(Encoding encoding, int skipLevel, Schema writerSchema, 
+  static void print(Encoding encoding, int skipLevel, Schema writerSchema,
       Schema readerSchema, Object[] writtenValues, Object[] expectedValues) {
-    LOG.debug("{} Skip Level {}", encoding, skipLevel); 
+    LOG.debug("{} Skip Level {}", encoding, skipLevel);
     printSchemaAndValues("Writer", writerSchema, writtenValues);
     printSchemaAndValues("Reader", readerSchema, expectedValues);
   }
 
   private static void printSchemaAndValues(String schemaType, Schema schema, Object[] values) {
-    LOG.debug("{} Schema {}", schemaType, schema); 
+    LOG.debug("{} Schema {}", schemaType, schema);
     for (Object value : values) {
       LOG.debug("{} -> {}", value, value.getClass().getSimpleName());
     }
-  }  
+  }
 }
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator.java b/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator.java
index 142d104..2d9a83e 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator.java
@@ -47,7 +47,7 @@
 public class TestResolvingGrammarGenerator {
   private final Schema schema;
   private final JsonNode data;
-  
+
   public TestResolvingGrammarGenerator(String jsonSchema, String jsonData)
     throws IOException {
     this.schema = Schema.parse(jsonSchema);
@@ -61,9 +61,9 @@
   public void test() throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     EncoderFactory factory = EncoderFactory.get();
-    Encoder e = factory.validatingEncoder(schema, 
+    Encoder e = factory.validatingEncoder(schema,
         factory.binaryEncoder(baos, null));
-    
+
     ResolvingGrammarGenerator.encode(e, schema, data);
     e.flush();
   }
@@ -91,7 +91,7 @@
           "Found ns.MyRecord, expecting ns.MyRecord, missing required field field2", typeException.getMessage());
     }
   }
-  
+
   @Parameterized.Parameters
   public static Collection<Object[]> data() {
     Collection<Object[]> ret = Arrays.asList(
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator2.java b/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator2.java
index 1b5ac6f..ea9ed1a 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator2.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator2.java
@@ -26,7 +26,7 @@
 import org.junit.Test;
 
 /** ResolvingGrammarGenerator tests that are not Parameterized.*/
-public class TestResolvingGrammarGenerator2 {  
+public class TestResolvingGrammarGenerator2 {
   @Test public void testFixed() throws java.io.IOException {
     new ResolvingGrammarGenerator().generate
       (Schema.createFixed("MyFixed", null, null, 10),
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestByteBuffer.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestByteBuffer.java
index e48fd14..602d39e 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestByteBuffer.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestByteBuffer.java
@@ -67,9 +67,9 @@
   @Test public void test() throws Exception{
     Schema schema = ReflectData.get().getSchema(X.class);
     ByteArrayOutputStream bout = new ByteArrayOutputStream();
-    writeOneXAsAvro(schema, bout);		
+    writeOneXAsAvro(schema, bout);
     X record = readOneXFromAvro(schema, bout);
-		
+
     String expected = getmd5(content);
     String actual = getmd5(record.content);
     assertEquals("md5 for result differed from input",expected,actual);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java
index 41f508c..3267529 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java
@@ -59,7 +59,7 @@
     String testType = "NonStringKeysTest";
     Company [] entityObjs = {entityObj1, entityObj2};
     byte[] bytes = testSerialization(testType, entityObj1, entityObj2);
-    List<GenericRecord> records = 
+    List<GenericRecord> records =
       (List<GenericRecord>) testGenericDatumRead(testType, bytes, entityObjs);
 
     GenericRecord record = records.get(0);
@@ -76,7 +76,7 @@
     Object id = ((GenericRecord)key).get("id");
     Object name = ((GenericRecord)value).get("name").toString();
     assertTrue (
-      (id.equals(1) && name.equals("Foo")) || 
+      (id.equals(1) && name.equals("Foo")) ||
       (id.equals(2) && name.equals("Bar"))
     );
 
@@ -92,7 +92,7 @@
       id = e.getKey().getId();
       name = e.getValue().getName();
       assertTrue (
-        (id.equals(1) && name.equals("Foo")) || 
+        (id.equals(1) && name.equals("Foo")) ||
         (id.equals(2) && name.equals("Bar"))
       );
     }
@@ -103,7 +103,7 @@
     GenericRecord jsonRecord = testJsonDecoder(testType, jsonBytes, entityObj1);
     assertEquals ("JSON decoder output not same as Binary Decoder", record, jsonRecord);
   }
-  
+
   @Test
   public void testNonStringMapKeysInNestedMaps() throws Exception {
 
@@ -119,7 +119,7 @@
     Object employees = record.get("employees");
     assertTrue ("Unable to read 'employees' map", employees instanceof GenericArray);
     GenericArray employeesMapArray = ((GenericArray)employees);
-    
+
     Object employeeMapElement = employeesMapArray.get(0);
     assertTrue (employeeMapElement instanceof GenericRecord);
     Object key = ((GenericRecord)employeeMapElement).get(ReflectData.NS_MAP_KEY);
@@ -129,11 +129,11 @@
     GenericRecord employeeInfo = (GenericRecord)value;
     Object name = employeeInfo.get("name").toString();
     assertEquals ("Foo", name);
-    
+
     Object companyMap = employeeInfo.get("companyMap");
     assertTrue (companyMap instanceof GenericArray);
     GenericArray companyMapArray = (GenericArray)companyMap;
-    
+
     Object companyMapElement = companyMapArray.get(0);
     assertTrue (companyMapElement instanceof GenericRecord);
     key = ((GenericRecord)companyMapElement).get(ReflectData.NS_MAP_KEY);
@@ -142,7 +142,7 @@
     if (value instanceof Utf8)
       value = ((Utf8)value).toString();
     assertEquals ("CompanyFoo", value);
-    
+
     List<Company2> records2 =
       (List<Company2>) testReflectDatumRead(testType, bytes, entityObjs);
     Company2 co = records2.get(0);
@@ -180,7 +180,7 @@
     Object map1obj = record.get("map1");
     assertTrue ("Unable to read map1", map1obj instanceof GenericArray);
     GenericArray map1array = ((GenericArray)map1obj);
-    
+
     Object map1element = map1array.get(0);
     assertTrue (map1element instanceof GenericRecord);
     Object key = ((GenericRecord)map1element).get(ReflectData.NS_MAP_KEY);
@@ -190,7 +190,7 @@
 
     Object map2obj = record.get("map2");
     assertEquals (map1obj, map2obj);
-    
+
     List<SameMapSignature> records2 =
       (List<SameMapSignature>) testReflectDatumRead(testType, bytes, entityObjs);
     SameMapSignature entity = records2.get(0);
@@ -221,9 +221,9 @@
     byte[] jsonBytes = testJsonEncoder (testType, entityObj1);
     assertNotNull ("Unable to serialize using jsonEncoder", jsonBytes);
     GenericRecord jsonRecord = testJsonDecoder(testType, jsonBytes, entityObj1);
-    assertEquals ("JSON decoder output not same as Binary Decoder", 
+    assertEquals ("JSON decoder output not same as Binary Decoder",
       record.get("map1"), jsonRecord.get("map1"));
-    assertEquals ("JSON decoder output not same as Binary Decoder", 
+    assertEquals ("JSON decoder output not same as Binary Decoder",
       record.get("map2"), jsonRecord.get("map2"));
   }
 
@@ -280,7 +280,7 @@
   /**
    * Test that non-string map-keys are readable through ReflectDatumReader
    * This methoud should form the original map and should not return any
-   * array of {key, value} as done by {@link #testGenericDatumRead()} 
+   * array of {key, value} as done by {@link #testGenericDatumRead()}
    */
   private <T> List<T> testReflectDatumRead
     (String testType, byte[] bytes, T ... entityObjs) throws IOException {
@@ -349,15 +349,15 @@
     Company2 co = new Company2 ();
     HashMap<Integer, EmployeeInfo2> employees = new HashMap<Integer, EmployeeInfo2>();
     co.setEmployees(employees);
-    
+
     EmployeeId2 empId = new EmployeeId2(1);
     EmployeeInfo2 empInfo = new EmployeeInfo2("Foo");
     HashMap<Integer, String> companyMap = new HashMap<Integer, String>();
     empInfo.setCompanyMap(companyMap);
     companyMap.put(14, "CompanyFoo");
-    
+
     employees.put(11, empInfo);
-    
+
     return co;
   }
 
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflect.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflect.java
index 6c29ccc..a281a06 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflect.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflect.java
@@ -50,9 +50,9 @@
 import org.junit.Test;
 
 public class TestReflect {
-  
+
   EncoderFactory factory = new EncoderFactory();
-  
+
   // test primitive type inference
   @Test public void testVoid() {
     check(Void.TYPE, "\"null\"");
@@ -154,13 +154,13 @@
       mapField.put("foo", "bar");
       listField.add("foo");
     }
-    
+
     @Override
     public boolean equals(Object o) {
       if (!(o instanceof R1)) return false;
       R1 that = (R1)o;
       return mapField.equals(that.mapField)
-        && Arrays.equals(this.arrayField, that.arrayField) 
+        && Arrays.equals(this.arrayField, that.arrayField)
         &&  listField.equals(that.listField);
     }
   }
@@ -179,7 +179,7 @@
           "{\"type\":\"array\",\"items\":\"string\""
           +",\"java-class\":\"java.util.List\"}");
   }
-  
+
   @Test public void testR1() throws Exception {
     checkReadWrite(new R1());
   }
@@ -188,12 +188,12 @@
   public static class R2 {
     private String[] arrayField;
     private Collection<String> collectionField;
-    
+
     @Override
     public boolean equals(Object o) {
       if (!(o instanceof R2)) return false;
       R2 that = (R2)o;
-      return Arrays.equals(this.arrayField, that.arrayField) 
+      return Arrays.equals(this.arrayField, that.arrayField)
         &&  collectionField.equals(that.collectionField);
     }
   }
@@ -209,7 +209,7 @@
   // test array i/o of unboxed type
   public static class R3 {
     private int[] intArray;
-    
+
     @Override
     public boolean equals(Object o) {
       if (!(o instanceof R3)) return false;
@@ -230,7 +230,7 @@
     public short[] shorts;
     public byte b;
     public char c;
-    
+
     @Override
     public boolean equals(Object o) {
       if (!(o instanceof R4)) return false;
@@ -335,7 +335,7 @@
       return this.text.equals(((R10)o).text);
     }
   }
-  
+
   @Test public void testR10() throws Exception {
     Schema r10Schema = ReflectData.get().getSchema(R10.class);
     assertEquals(Schema.Type.STRING, r10Schema.getType());
@@ -354,7 +354,7 @@
       return this.text.equals(that.text);
     }
   }
-  
+
   @Test public void testR11() throws Exception {
     Schema r11Record = ReflectData.get().getSchema(R11.class);
     assertEquals(Schema.Type.RECORD, r11Record.getType());
@@ -414,7 +414,7 @@
                  ("{\"type\":\"array\",\"items\":[\"null\",\"string\"]}"),
                  s.getField("strings").schema());
   }
-    
+
   @AvroSchema("\"null\"")                          // record
   public class R13 {}
 
@@ -422,7 +422,7 @@
     Schema s = ReflectData.get().getSchema(R13.class);
     assertEquals(Schema.Type.NULL, s.getType());
   }
-    
+
   public interface P4 {
     @AvroSchema("\"int\"")                        // message value
     Object foo(@AvroSchema("\"int\"")Object x);   // message param
@@ -506,45 +506,45 @@
           +"{\"name\":\"a\",\"type\":\"int\"},"
           +"{\"name\":\"b\",\"type\":\"long\"}]}");
   }
-  
+
   public static class RAvroIgnore { @AvroIgnore int a; }
   @Test public void testAnnotationAvroIgnore() throws Exception {
     check(RAvroIgnore.class, "{\"type\":\"record\",\"name\":\"RAvroIgnore\",\"namespace\":"
           +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":[]}");
   }
-  
+
   public static class RAvroMeta { @AvroMeta(key="K", value="V") int a; }
   @Test public void testAnnotationAvroMeta() throws Exception {
     check(RAvroMeta.class, "{\"type\":\"record\",\"name\":\"RAvroMeta\",\"namespace\":"
-          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":[" 
+          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":["
           +"{\"name\":\"a\",\"type\":\"int\",\"K\":\"V\"}]}");
   }
-  
+
   public static class RAvroName { @AvroName("b") int a; }
   @Test public void testAnnotationAvroName() throws Exception {
     check(RAvroName.class, "{\"type\":\"record\",\"name\":\"RAvroName\",\"namespace\":"
-          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":[" 
+          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":["
           +"{\"name\":\"b\",\"type\":\"int\"}]}");
   }
-  
+
   public static class RAvroNameCollide { @AvroName("b") int a; int b; }
   @Test(expected=Exception.class)
   public void testAnnotationAvroNameCollide() throws Exception {
     check(RAvroNameCollide.class, "{\"type\":\"record\",\"name\":\"RAvroNameCollide\",\"namespace\":"
-          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":[" 
-          +"{\"name\":\"b\",\"type\":\"int\"}," 
+          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":["
+          +"{\"name\":\"b\",\"type\":\"int\"},"
           +"{\"name\":\"b\",\"type\":\"int\"}]}");
   }
-  
+
   public static class RAvroStringableField { @Stringable int a; }
   public void testAnnotationAvroStringableFields() throws Exception {
     check(RAvroStringableField.class, "{\"type\":\"record\",\"name\":\"RAvroNameCollide\",\"namespace\":"
-          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":[" 
+          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":["
           +"{\"name\":\"a\",\"type\":\"String\"}]}");
   }
-  
-  
-  
+
+
+
 
   private void check(Object o, String schemaJson) {
     check(o.getClass(), schemaJson);
@@ -557,14 +557,14 @@
   @Test
   public void testRecordIO() throws IOException {
     Schema schm = ReflectData.get().getSchema(SampleRecord.class);
-    ReflectDatumWriter<SampleRecord> writer = 
+    ReflectDatumWriter<SampleRecord> writer =
       new ReflectDatumWriter<SampleRecord>(schm);
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     SampleRecord record = new SampleRecord();
     record.x = 5;
     record.y = 10;
     writer.write(record, factory.directBinaryEncoder(out, null));
-    ReflectDatumReader<SampleRecord> reader = 
+    ReflectDatumReader<SampleRecord> reader =
       new ReflectDatumReader<SampleRecord>(schm);
     SampleRecord decoded =
       reader.read(null, DecoderFactory.get().binaryDecoder(
@@ -575,19 +575,19 @@
   public static class AvroEncRecord {
     @AvroEncode(using=DateAsLongEncoding.class)
     java.util.Date date;
-    
-    @Override 
+
+    @Override
     public boolean equals(Object o) {
       if (!(o instanceof AvroEncRecord)) return false;
       return date.equals(((AvroEncRecord)o).date);
     }
   }
-  
+
   public static class multipleAnnotationRecord {
     @AvroIgnore
     @Stringable
     Integer i1;
-    
+
     @AvroIgnore
     @Nullable
     Integer i2;
@@ -595,27 +595,27 @@
     @AvroIgnore
     @AvroName("j")
     Integer i3;
-    
+
     @AvroIgnore
     @AvroEncode(using=DateAsLongEncoding.class)
     java.util.Date i4;
-    
+
     @Stringable
     @Nullable
     Integer i5;
-    
+
     @Stringable
     @AvroName("j6")
-    Integer i6 = 6;    
-    
+    Integer i6 = 6;
+
     @Stringable
     @AvroEncode(using=DateAsLongEncoding.class)
     java.util.Date i7 = new java.util.Date(7L);
-    
+
     @Nullable
     @AvroName("j8")
-    Integer i8;    
-      
+    Integer i8;
+
     @Nullable
     @AvroEncode(using=DateAsLongEncoding.class)
     java.util.Date i9;
@@ -630,11 +630,11 @@
     @AvroEncode(using=DateAsLongEncoding.class)
     java.util.Date i11;
   }
-  
+
   @Test
   public void testMultipleAnnotations() throws IOException {
     Schema schm = ReflectData.get().getSchema(multipleAnnotationRecord.class);
-    ReflectDatumWriter<multipleAnnotationRecord> writer = 
+    ReflectDatumWriter<multipleAnnotationRecord> writer =
       new ReflectDatumWriter<multipleAnnotationRecord>(schm);
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     multipleAnnotationRecord record = new multipleAnnotationRecord();
@@ -649,9 +649,9 @@
     record.i9 = new java.util.Date(9L);
     record.i10 = new java.util.Date(10L);
     record.i11 = new java.util.Date(11L);
-    
+
     writer.write(record, factory.directBinaryEncoder(out, null));
-    ReflectDatumReader<multipleAnnotationRecord> reader = 
+    ReflectDatumReader<multipleAnnotationRecord> reader =
       new ReflectDatumReader<multipleAnnotationRecord>(schm);
       multipleAnnotationRecord decoded =
       reader.read(new multipleAnnotationRecord(), DecoderFactory.get().binaryDecoder(
@@ -668,8 +668,8 @@
     assertTrue(decoded.i10.getTime() == 10);
     assertTrue(decoded.i11.getTime() == 11);
   }
-  
-  
+
+
   @Test
   public void testAvroEncodeInducing() throws IOException {
     Schema schm = ReflectData.get().getSchema(AvroEncRecord.class);
@@ -677,29 +677,29 @@
       "\":\"org.apache.avro.reflect.TestReflect$\",\"fields\":[{\"name\":\"date\"," +
       "\"type\":{\"type\":\"long\",\"CustomEncoding\":\"DateAsLongEncoding\"}}]}");
   }
-  
+
   @Test
   public void testAvroEncodeIO() throws IOException {
     Schema schm = ReflectData.get().getSchema(AvroEncRecord.class);
-    ReflectDatumWriter<AvroEncRecord> writer = 
+    ReflectDatumWriter<AvroEncRecord> writer =
       new ReflectDatumWriter<AvroEncRecord>(schm);
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     AvroEncRecord record = new AvroEncRecord();
     record.date = new java.util.Date(948833323L);
     writer.write(record, factory.directBinaryEncoder(out, null));
-    ReflectDatumReader<AvroEncRecord> reader = 
+    ReflectDatumReader<AvroEncRecord> reader =
       new ReflectDatumReader<AvroEncRecord>(schm);
     AvroEncRecord decoded =
       reader.read(new AvroEncRecord(), DecoderFactory.get().binaryDecoder(
           out.toByteArray(), null));
     assertEquals(record, decoded);
   }
-  
+
   @Test
   public void testRecordWithNullIO() throws IOException {
     ReflectData reflectData = ReflectData.AllowNull.get();
     Schema schm = reflectData.getSchema(AnotherSampleRecord.class);
-    ReflectDatumWriter<AnotherSampleRecord> writer = 
+    ReflectDatumWriter<AnotherSampleRecord> writer =
       new ReflectDatumWriter<AnotherSampleRecord>(schm);
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     // keep record.a null and see if that works
@@ -709,7 +709,7 @@
     AnotherSampleRecord b = new AnotherSampleRecord(10);
     writer.write(b, e);
     e.flush();
-    ReflectDatumReader<AnotherSampleRecord> reader = 
+    ReflectDatumReader<AnotherSampleRecord> reader =
       new ReflectDatumReader<AnotherSampleRecord>(schm);
     ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
     Decoder d = DecoderFactory.get().binaryDecoder(in, null);
@@ -761,7 +761,7 @@
         return false;
       return true;
     }
-    
+
     public static class AnotherSampleRecord {
       private Integer a = null;
       private SampleRecord s = null;
@@ -823,7 +823,7 @@
   }
 
   @Test(expected=AvroTypeException.class)
-  public void testOverloadedMethod() { 
+  public void testOverloadedMethod() {
     ReflectData.get().getProtocol(P3.class);
   }
 
@@ -860,17 +860,17 @@
     // test that this instance can be written & re-read
     checkBinary(schema, record);
   }
-  
+
   @Test
   public void testPrimitiveArray() throws Exception {
     testPrimitiveArrays(false);
   }
-  
+
   @Test
   public void testPrimitiveArrayBlocking() throws Exception {
     testPrimitiveArrays(true);
   }
-  
+
   private void testPrimitiveArrays(boolean blocking) throws Exception {
     testPrimitiveArray(boolean.class, blocking);
     testPrimitiveArray(byte.class, blocking);
@@ -984,7 +984,7 @@
       Object datum, boolean equals) throws IOException {
     checkBinary(reflectData, schema, datum, equals, false);
   }
-  
+
   private static void checkBinary(ReflectData reflectData, Schema schema,
       Object datum, boolean equals, boolean blocking) throws IOException {
     ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(schema);
@@ -1026,20 +1026,20 @@
   @AvroAlias(alias="a", space="")
   private static class AliasB { }
   @AvroAlias(alias="a")
-  private static class AliasC { }  
-  
+  private static class AliasC { }
+
   @Test
   public void testAvroAlias() {
     check(AliasA.class, "{\"type\":\"record\",\"name\":\"AliasA\",\"namespace\":\"org.apache.avro.reflect.TestReflect$\",\"fields\":[],\"aliases\":[\"b.a\"]}");
     check(AliasB.class, "{\"type\":\"record\",\"name\":\"AliasB\",\"namespace\":\"org.apache.avro.reflect.TestReflect$\",\"fields\":[],\"aliases\":[\"a\"]}");
-    check(AliasC.class, "{\"type\":\"record\",\"name\":\"AliasC\",\"namespace\":\"org.apache.avro.reflect.TestReflect$\",\"fields\":[],\"aliases\":[\"a\"]}");    
+    check(AliasC.class, "{\"type\":\"record\",\"name\":\"AliasC\",\"namespace\":\"org.apache.avro.reflect.TestReflect$\",\"fields\":[],\"aliases\":[\"a\"]}");
   }
 
   private static class DefaultTest {
     @AvroDefault("1")
     int foo;
-  }  
-  
+  }
+
   @Test
   public void testAvroDefault() {
     check(DefaultTest.class,
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectionUtil.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectionUtil.java
index 4414d20..9d017f2 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectionUtil.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectionUtil.java
@@ -35,13 +35,13 @@
     Class<?> testerClass = cl.loadClass(Tester.class.getName());
     testerClass.getDeclaredMethod("checkUnsafe").invoke(testerClass.newInstance());
   }
-  
+
   public static final class Tester {
     public Tester() {}
     public void checkUnsafe() {
       ReflectionUtil.getFieldAccess();
     }
-    
+
   }
 
   private static final class NoUnsafe extends ClassLoader {
diff --git a/lang/java/avro/src/test/java/org/apache/avro/util/CaseFinder.java b/lang/java/avro/src/test/java/org/apache/avro/util/CaseFinder.java
index ddfc2a8..2f24a74 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/util/CaseFinder.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/util/CaseFinder.java
@@ -187,7 +187,7 @@
     // Determine if this is a single-line heredoc, and process if it is
     String singleLineText = m.group(2);
     if (singleLineText.length() != 0) {
-      if (! singleLineText.startsWith(" ")) 
+      if (! singleLineText.startsWith(" "))
         throw new IOException("Single-line heredoc missing initial space (\""+docStart+"\")");
       return singleLineText.substring(1);
     }
diff --git a/lang/java/avro/src/test/java/org/apache/avro/util/TestUtf8.java b/lang/java/avro/src/test/java/org/apache/avro/util/TestUtf8.java
index 758e3e5..2c5e771 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/util/TestUtf8.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/util/TestUtf8.java
@@ -35,9 +35,9 @@
       assertEquals(bs[i], u.getBytes()[i]);
     }
   }
-  
+
   @Test public void testArrayReusedWhenLargerThanRequestedSize() throws UnsupportedEncodingException {
-    byte[] bs = "55555".getBytes("UTF-8");    
+    byte[] bs = "55555".getBytes("UTF-8");
     Utf8 u = new Utf8(bs);
     assertEquals(5, u.getByteLength());
     byte[] content = u.getBytes();
diff --git a/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/ProtocolTask.java b/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/ProtocolTask.java
index 23f8d7e..36bf67b 100644
--- a/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/ProtocolTask.java
+++ b/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/ProtocolTask.java
@@ -38,22 +38,22 @@
   private StringType stringType = StringType.CharSequence;
 
   private final ArrayList<FileSet> filesets = new ArrayList<FileSet>();
-  
+
   /** Set the schema file. */
   public void setFile(File file) { this.src = file; }
-  
+
   /** Set the output directory */
   public void setDestdir(File dir) { this.dest = dir; }
-  
+
   /** Set the string type. */
   public void setStringType(StringType type) { this.stringType = type; }
-  
+
   /** Get the string type. */
   public StringType getStringType() { return this.stringType; }
-  
+
   /** Add a fileset. */
   public void addFileset(FileSet set) { filesets.add(set); }
-  
+
   /** Run the compiler. */
   @Override
   public void execute() {
@@ -74,7 +74,7 @@
       }
     }
   }
-  
+
   protected void doCompile(File src, File dir) throws IOException {
     Protocol protocol = Protocol.parse(src);
     SpecificCompiler compiler = new SpecificCompiler(protocol);
diff --git a/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SpecificCompiler.java b/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SpecificCompiler.java
index 6faf368..823a2ef 100644
--- a/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SpecificCompiler.java
+++ b/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SpecificCompiler.java
@@ -103,7 +103,7 @@
   }
 
   /* Reserved words for accessor/mutator methods */
-  private static final Set<String> ACCESSOR_MUTATOR_RESERVED_WORDS = 
+  private static final Set<String> ACCESSOR_MUTATOR_RESERVED_WORDS =
       new HashSet<String>(Arrays.asList(new String[] {
             "class", "schema", "classSchema"
           }));
@@ -111,7 +111,7 @@
     // Add reserved words to accessor/mutator reserved words
     ACCESSOR_MUTATOR_RESERVED_WORDS.addAll(RESERVED_WORDS);
   }
-  
+
   /* Reserved words for error types */
   private static final Set<String> ERROR_RESERVED_WORDS = new HashSet<String>(
       Arrays.asList(new String[] { "message", "cause" }));
@@ -119,14 +119,14 @@
     // Add accessor/mutator reserved words to error reserved words
     ERROR_RESERVED_WORDS.addAll(ACCESSOR_MUTATOR_RESERVED_WORDS);
   }
-  
-  private static final String FILE_HEADER = 
+
+  private static final String FILE_HEADER =
       "/**\n" +
       " * Autogenerated by Avro\n" +
-      " * \n" +
+      " *\n" +
       " * DO NOT EDIT DIRECTLY\n" +
       " */\n";
-  
+
   public SpecificCompiler(Protocol protocol) {
     this();
     // enqueue all types
@@ -141,7 +141,7 @@
     enqueue(schema);
     this.protocol = null;
   }
-  
+
   SpecificCompiler() {
     this.templateDir =
       System.getProperty("org.apache.avro.specific.templates",
@@ -206,7 +206,7 @@
     velocityEngine.addProperty("resource.loader", "class, file");
     velocityEngine.addProperty("class.resource.loader.class",
         "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
-    velocityEngine.addProperty("file.resource.loader.class", 
+    velocityEngine.addProperty("file.resource.loader.class",
         "org.apache.velocity.runtime.resource.loader.FileResourceLoader");
     velocityEngine.addProperty("file.resource.loader.path", "/, .");
     velocityEngine.setProperty("runtime.references.strict", true);
@@ -547,7 +547,7 @@
     default: throw new RuntimeException("Unknown string type: "+stringType);
    }
   }
- 
+
   private static final Schema NULL_SCHEMA = Schema.create(Schema.Type.NULL);
 
   /** Utility for template use.  Returns the java type for a Schema. */
@@ -628,7 +628,7 @@
     b.append("\"");                               // final quote
     return b.toString();
   }
-  
+
   /** Utility for template use.  Escapes quotes and backslashes. */
   public static String javaEscape(Object o) {
       return o.toString().replace("\\","\\\\").replace("\"", "\\\"");
@@ -638,7 +638,7 @@
   public static String escapeForJavadoc(String s) {
       return s.replace("*/", "*&#47;");
   }
-  
+
   /** Utility for template use.  Returns empty string for null. */
   public static String nullToEmpty(String x) {
     return x == null ? "" : x;
@@ -648,19 +648,19 @@
   public static String mangle(String word) {
     return mangle(word, false);
   }
-  
+
   /** Utility for template use.  Adds a dollar sign to reserved words. */
   public static String mangle(String word, boolean isError) {
     return mangle(word, isError ? ERROR_RESERVED_WORDS : RESERVED_WORDS);
   }
-  
+
   /** Utility for template use.  Adds a dollar sign to reserved words. */
   public static String mangle(String word, Set<String> reservedWords) {
     return mangle(word, reservedWords, false);
   }
-  
+
   /** Utility for template use.  Adds a dollar sign to reserved words. */
-  public static String mangle(String word, Set<String> reservedWords, 
+  public static String mangle(String word, Set<String> reservedWords,
       boolean isMethod) {
     if (word.contains(".")) {
       // If the 'word' is really a full path of a class we must mangle just the classname
@@ -669,15 +669,15 @@
       String className = word.substring(lastDot + 1);
       return packageName + mangle(className, reservedWords, isMethod);
     }
-    if (reservedWords.contains(word) || 
+    if (reservedWords.contains(word) ||
         (isMethod && reservedWords.contains(
-            Character.toLowerCase(word.charAt(0)) + 
+            Character.toLowerCase(word.charAt(0)) +
             ((word.length() > 1) ? word.substring(1) : "")))) {
       return word + "$";
     }
     return word;
   }
-  
+
   /** Utility for use by templates. Return schema fingerprint as a long. */
   public static long fingerprint64(Schema schema) {
     return SchemaNormalization.parsingFingerprint64(schema);
@@ -692,7 +692,7 @@
   public static String generateGetMethod(Schema schema, Field field) {
     return generateMethodName(schema, field, "get", "");
   }
-  
+
   /**
    * Generates the name of a field mutator method.
    * @param schema the schema in which the field is defined.
@@ -702,7 +702,7 @@
   public static String generateSetMethod(Schema schema, Field field) {
     return generateMethodName(schema, field, "set", "");
   }
-  
+
   /**
    * Generates the name of a field "has" method.
    * @param schema the schema in which the field is defined.
@@ -712,7 +712,7 @@
   public static String generateHasMethod(Schema schema, Field field) {
     return generateMethodName(schema, field, "has", "");
   }
-  
+
   /**
    * Generates the name of a field "clear" method.
    * @param schema the schema in which the field is defined.
@@ -722,7 +722,7 @@
   public static String generateClearMethod(Schema schema, Field field) {
     return generateMethodName(schema, field, "clear", "");
   }
-  
+
   /** Utility for use by templates. Does this schema have a Builder method? */
   public static boolean hasBuilder(Schema schema) {
     switch (schema.getType()) {
@@ -779,20 +779,20 @@
    * @param postfix method name postfix, e.g. "" or "Builder".
    * @return the generated method name.
    */
-  private static String generateMethodName(Schema schema, Field field, 
+  private static String generateMethodName(Schema schema, Field field,
       String prefix, String postfix) {
 
-    // Check for the special case in which the schema defines two fields whose 
+    // Check for the special case in which the schema defines two fields whose
     // names are identical except for the case of the first character:
     char firstChar = field.name().charAt(0);
     String conflictingFieldName = (Character.isLowerCase(firstChar) ?
         Character.toUpperCase(firstChar) : Character.toLowerCase(firstChar)) +
         (field.name().length() > 1 ? field.name().substring(1) : "");
     boolean fieldNameConflict = schema.getField(conflictingFieldName) != null;
-    
+
     StringBuilder methodBuilder = new StringBuilder(prefix);
-    String fieldName = mangle(field.name(), 
-        schema.isError() ? ERROR_RESERVED_WORDS : 
+    String fieldName = mangle(field.name(),
+        schema.isError() ? ERROR_RESERVED_WORDS :
           ACCESSOR_MUTATOR_RESERVED_WORDS, true);
 
     boolean nextCharToUpper = true;
@@ -809,7 +809,7 @@
       }
     }
     methodBuilder.append(postfix);
-    
+
     // If there is a field name conflict append $0 or $1
     if (fieldNameConflict) {
       if (methodBuilder.charAt(methodBuilder.length() - 1) != '$') {
@@ -820,7 +820,7 @@
 
     return methodBuilder.toString();
   }
-  
+
   /** Tests whether an unboxed Java type can be set to null */
   public static boolean isUnboxedJavaTypeNullable(Schema schema) {
     switch (schema.getType()) {
@@ -838,7 +838,7 @@
     //compileSchema(new File(args[0]), new File(args[1]));
     compileProtocol(new File(args[0]), new File(args[1]));
   }
-  
+
   public static final class Slf4jLogChute implements LogChute {
     private Logger logger = LoggerFactory.getLogger("AvroVelocityLogChute");
     @Override
diff --git a/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/enum.vm b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/enum.vm
index 2056b1d..2117cd4 100644
--- a/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/enum.vm
+++ b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/enum.vm
@@ -16,8 +16,8 @@
 ## limitations under the License.
 ##
 #if ($schema.getNamespace())
-package $schema.getNamespace();  
-#end  
+package $schema.getNamespace();
+#end
 @SuppressWarnings("all")
 #if ($schema.getDoc())
 /** $schema.getDoc() */
@@ -26,7 +26,7 @@
 @$annotation
 #end
 @org.apache.avro.specific.AvroGenerated
-public enum ${this.mangle($schema.getName())} { 
+public enum ${this.mangle($schema.getName())} {
   #foreach ($symbol in ${schema.getEnumSymbols()})${this.mangle($symbol)}#if ($velocityHasNext), #end#end
   ;
   public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("${this.javaEscape($schema.toString())}");
diff --git a/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/fixed.vm b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/fixed.vm
index aff3597..b19e1b1 100644
--- a/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/fixed.vm
+++ b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/fixed.vm
@@ -16,7 +16,7 @@
 ## limitations under the License.
 ##
 #if ($schema.getNamespace())
-package $schema.getNamespace();  
+package $schema.getNamespace();
 #end
 @SuppressWarnings("all")
 #if ($schema.getDoc())
@@ -32,22 +32,22 @@
   public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("${this.javaEscape($schema.toString())}");
   public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
   public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  
+
   /** Creates a new ${this.mangle($schema.getName())} */
   public ${this.mangle($schema.getName())}() {
     super();
   }
-  
+
   /**
    * Creates a new ${this.mangle($schema.getName())} with the given bytes.
-   * @param bytes The bytes to create the new ${this.mangle($schema.getName())}. 
+   * @param bytes The bytes to create the new ${this.mangle($schema.getName())}.
    */
   public ${this.mangle($schema.getName())}(byte[] bytes) {
     super(bytes);
   }
 
   private static final org.apache.avro.io.DatumWriter
-    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);  
+    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);
 
   @Override public void writeExternal(java.io.ObjectOutput out)
     throws java.io.IOException {
@@ -55,7 +55,7 @@
   }
 
   private static final org.apache.avro.io.DatumReader
-    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);  
+    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);
 
   @Override public void readExternal(java.io.ObjectInput in)
     throws java.io.IOException {
diff --git a/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/record.vm b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/record.vm
index d0c1968..3e26df5 100644
--- a/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/record.vm
+++ b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/record.vm
@@ -16,7 +16,7 @@
 ## limitations under the License.
 ##
 #if ($schema.getNamespace())
-package $schema.getNamespace();  
+package $schema.getNamespace();
 #end
 
 import org.apache.avro.specific.SpecificData;
@@ -47,7 +47,7 @@
   public ${this.mangle($schema.getName())}() {
     super();
   }
-  
+
   public ${this.mangle($schema.getName())}(Object value) {
     super(value);
   }
@@ -59,14 +59,14 @@
   public ${this.mangle($schema.getName())}(Object value, Throwable cause) {
     super(value, cause);
   }
-  
+
 #else
-#if ($schema.getFields().size() > 0)  
+#if ($schema.getFields().size() > 0)
 
   /**
    * Default constructor.  Note that this does not initialize fields
    * to their default values from the schema.  If that is desired then
-   * one should use <code>newBuilder()</code>. 
+   * one should use <code>newBuilder()</code>.
    */
   public ${this.mangle($schema.getName())}() {}
 #if ($this.isCreateAllArgsConstructor())
@@ -96,7 +96,7 @@
 
 #end
   public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter.  Applications should not call. 
+  // Used by DatumWriter.  Applications should not call.
   public java.lang.Object get(int field$) {
     switch (field$) {
 #set ($i = 0)
@@ -107,7 +107,7 @@
     default: throw new org.apache.avro.AvroRuntimeException("Bad index");
     }
   }
-  // Used by DatumReader.  Applications should not call. 
+  // Used by DatumReader.  Applications should not call.
   @SuppressWarnings(value="unchecked")
   public void put(int field$, java.lang.Object value$) {
     switch (field$) {
@@ -151,7 +151,7 @@
   public static #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder newBuilder() {
     return new #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder();
   }
-  
+
   /**
    * Creates a new ${this.mangle($schema.getName())} RecordBuilder by copying an existing Builder.
    * @param other The existing builder to copy.
@@ -160,7 +160,7 @@
   public static #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder newBuilder(#if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder other) {
     return new #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder(other);
   }
-  
+
   /**
    * Creates a new ${this.mangle($schema.getName())} RecordBuilder by copying an existing $this.mangle($schema.getName()) instance.
    * @param other The existing instance to copy.
@@ -169,7 +169,7 @@
   public static #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder newBuilder(#if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())} other) {
     return new #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder(other);
   }
-  
+
   /**
    * RecordBuilder for ${this.mangle($schema.getName())} instances.
    */
@@ -191,7 +191,7 @@
     private Builder() {
       super(SCHEMA$);
     }
-    
+
     /**
      * Creates a Builder by copying an existing Builder.
      * @param other The existing Builder to copy.
@@ -210,7 +210,7 @@
 #end
 #end
     }
-    
+
     /**
      * Creates a Builder by copying an existing $this.mangle($schema.getName()) instance
      * @param other The existing instance to copy.
@@ -235,7 +235,7 @@
       super.setValue(value);
       return this;
     }
-    
+
     @Override
     public #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder clearValue() {
       super.clearValue();
@@ -247,7 +247,7 @@
       super.setCause(cause);
       return this;
     }
-    
+
     @Override
     public #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder clearCause() {
       super.clearCause();
@@ -280,7 +280,7 @@
 #end
       this.${this.mangle($field.name(), $schema.isError())} = value;
       fieldSetFlags()[$field.pos()] = true;
-      return this; 
+      return this;
     }
 
     /**
@@ -376,7 +376,7 @@
   }
 
   private static final org.apache.avro.io.DatumWriter
-    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);  
+    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);
 
   @Override public void writeExternal(java.io.ObjectOutput out)
     throws java.io.IOException {
@@ -384,7 +384,7 @@
   }
 
   private static final org.apache.avro.io.DatumReader
-    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);  
+    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);
 
   @Override public void readExternal(java.io.ObjectInput in)
     throws java.io.IOException {
diff --git a/lang/java/compiler/src/test/idl/putOnClassPath/OnTheClasspath.avdl b/lang/java/compiler/src/test/idl/putOnClassPath/OnTheClasspath.avdl
index 4533c54..233c0f6 100644
--- a/lang/java/compiler/src/test/idl/putOnClassPath/OnTheClasspath.avdl
+++ b/lang/java/compiler/src/test/idl/putOnClassPath/OnTheClasspath.avdl
@@ -18,7 +18,7 @@
 
 @namespace("org.on.the.classpath")
 protocol OnTheClasspath {
-    import idl "nestedtypes.avdl";
-	record FromAfar {
-	}
+  import idl "nestedtypes.avdl";
+  record FromAfar {
+  }
 }
diff --git a/lang/java/compiler/src/test/idl/putOnClassPath/nestedtypes.avdl b/lang/java/compiler/src/test/idl/putOnClassPath/nestedtypes.avdl
index a8aafe4..6d2e1da 100644
--- a/lang/java/compiler/src/test/idl/putOnClassPath/nestedtypes.avdl
+++ b/lang/java/compiler/src/test/idl/putOnClassPath/nestedtypes.avdl
@@ -18,6 +18,6 @@
 
 @namespace("org.on.the.classpath")
 protocol OnTheClasspathTypes {
-	record NestedType {
-	}
+  record NestedType {
+  }
 }
diff --git a/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestIdl.java b/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestIdl.java
index 7e4f686..52403a2 100644
--- a/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestIdl.java
+++ b/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestIdl.java
@@ -89,7 +89,7 @@
     if (! "run".equals(TEST_MODE)) return;
 
     int passed = 0, failed = 0;
-    
+
     for (GenTest t : tests) {
       try {
         t.run();
@@ -136,7 +136,7 @@
       String newPath = currentWorkPath + "src" + File.separator + "test"
         + File.separator + "idl" + File.separator
         + "putOnClassPath" + File.separator;
-      URL[] newPathURL = new URL[]{new URL(newPath)}; 
+      URL[] newPathURL = new URL[]{new URL(newPath)};
       URLClassLoader ucl = new URLClassLoader(newPathURL, cl);
 
       Idl parser = new Idl(in, ucl);
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/CallFuture.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/CallFuture.java
index 77561d3..588ea7d 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/CallFuture.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/CallFuture.java
@@ -32,14 +32,14 @@
   private final Callback<T> chainedCallback;
   private T result = null;
   private Throwable error = null;
-  
+
   /**
    * Creates a CallFuture.
    */
   public CallFuture() {
     this(null);
   }
-  
+
   /**
    * Creates a CallFuture with a chained Callback which will be invoked
    * when this CallFuture's Callback methods are invoked.
@@ -48,9 +48,9 @@
   public CallFuture(Callback<T> chainedCallback) {
     this.chainedCallback = chainedCallback;
   }
-  
+
   /**
-   * Sets the RPC response, and unblocks all threads waiting on {@link #get()} 
+   * Sets the RPC response, and unblocks all threads waiting on {@link #get()}
    * or {@link #get(long, TimeUnit)}.
    * @param result the RPC result to set.
    */
@@ -62,9 +62,9 @@
       chainedCallback.handleResult(result);
     }
   }
-  
+
   /**
-   * Sets an error thrown during RPC execution, and unblocks all threads waiting 
+   * Sets an error thrown during RPC execution, and unblocks all threads waiting
    * on {@link #get()} or {@link #get(long, TimeUnit)}.
    * @param error the RPC error to set.
    */
@@ -79,21 +79,21 @@
 
   /**
    * Gets the value of the RPC result without blocking.
-   * Using {@link #get()} or {@link #get(long, TimeUnit)} is usually 
-   * preferred because these methods block until the result is available or 
-   * an error occurs. 
-   * @return the value of the response, or null if no result was returned or 
+   * Using {@link #get()} or {@link #get(long, TimeUnit)} is usually
+   * preferred because these methods block until the result is available or
+   * an error occurs.
+   * @return the value of the response, or null if no result was returned or
    * the RPC has not yet completed.
    */
   public T getResult() {
     return result;
   }
-  
+
   /**
    * Gets the error that was thrown during RPC execution.  Does not block.
-   * Either {@link #get()} or {@link #get(long, TimeUnit)} should be called 
+   * Either {@link #get()} or {@link #get(long, TimeUnit)} should be called
    * first because these methods block until the RPC has completed.
-   * @return the RPC error that was thrown, or null if no error has occurred or 
+   * @return the RPC error that was thrown, or null if no error has occurred or
    * if the RPC has not yet completed.
    */
   public Throwable getError() {
@@ -132,7 +132,7 @@
       throw new TimeoutException();
     }
   }
-  
+
   /**
    * Waits for the CallFuture to complete without returning the result.
    * @throws InterruptedException if interrupted.
@@ -140,7 +140,7 @@
   public void await() throws InterruptedException {
     latch.await();
   }
-  
+
   /**
    * Waits for the CallFuture to complete without returning the result.
    * @param timeout the maximum time to wait.
@@ -148,7 +148,7 @@
    * @throws InterruptedException if interrupted.
    * @throws TimeoutException if the wait timed out.
    */
-  public void await(long timeout, TimeUnit unit) 
+  public void await(long timeout, TimeUnit unit)
     throws InterruptedException, TimeoutException {
     if (!latch.await(timeout, unit)) {
       throw new TimeoutException();
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Callback.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Callback.java
index fdad4a7..a099725 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Callback.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Callback.java
@@ -20,8 +20,8 @@
 
 /**
  * Interface for receiving asynchronous callbacks.
- * For each request with an asynchronous callback, 
- * either {@link #handleResult(Object)} or {@link #handleError(Throwable)} 
+ * For each request with an asynchronous callback,
+ * either {@link #handleResult(Object)} or {@link #handleError(Throwable)}
  * will be invoked.
  */
 public interface Callback<T> {
@@ -30,7 +30,7 @@
    * @param result the result returned in the callback.
    */
   void handleResult(T result);
-  
+
   /**
    * Receives an error.
    * @param error the error returned in the callback.
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramServer.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramServer.java
index f0a8f1b..4990bf0 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramServer.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramServer.java
@@ -66,7 +66,7 @@
       }
     }
   }
-  
+
   public void close() { this.interrupt(); }
 
   public static void main(String[] arg) throws Exception {
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpTransceiver.java
index 9f6572f..11c9ebb 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpTransceiver.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpTransceiver.java
@@ -31,13 +31,13 @@
 
 /** An HTTP-based {@link Transceiver} implementation. */
 public class HttpTransceiver extends Transceiver {
-  static final String CONTENT_TYPE = "avro/binary"; 
+  static final String CONTENT_TYPE = "avro/binary";
 
   private URL url;
   private Proxy proxy;
   private HttpURLConnection connection;
   private int timeout;
-  
+
   public HttpTransceiver(URL url) { this.url = url; }
 
   public HttpTransceiver(URL url, Proxy proxy) {
@@ -49,7 +49,7 @@
   public void setTimeout(int timeout) { this.timeout = timeout; }
 
   public String getRemoteName() { return this.url.toString(); }
-    
+
   public synchronized List<ByteBuffer> readBuffers() throws IOException {
     InputStream in = connection.getInputStream();
     try {
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyServer.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyServer.java
index a86ebbe..534f0bf 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyServer.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyServer.java
@@ -62,13 +62,13 @@
       "avro-netty-server");
   private final ChannelFactory channelFactory;
   private final CountDownLatch closed = new CountDownLatch(1);
-  private final ExecutionHandler executionHandler;            
-  
+  private final ExecutionHandler executionHandler;
+
   public NettyServer(Responder responder, InetSocketAddress addr) {
     this(responder, addr, new NioServerSocketChannelFactory
          (Executors .newCachedThreadPool(), Executors.newCachedThreadPool()));
   }
-  
+
   public NettyServer(Responder responder, InetSocketAddress addr,
                      ChannelFactory channelFactory) {
       this(responder, addr, channelFactory, null);
@@ -123,12 +123,12 @@
       }
     }, executionHandler);
   }
-    
+
   @Override
   public void start() {
     // No-op.
   }
-  
+
   @Override
   public void close() {
     ChannelGroupFuture future = allChannels.close();
@@ -136,7 +136,7 @@
     channelFactory.releaseExternalResources();
     closed.countDown();
   }
-  
+
   @Override
   public int getPort() {
     return ((InetSocketAddress) serverChannel.getLocalAddress()).getPort();
@@ -146,7 +146,7 @@
   public void join() throws InterruptedException {
     closed.await();
   }
-  
+
   /**
    *
    * @return The number of clients currently connected to this server.
@@ -158,12 +158,12 @@
   }
 
   /**
-   * Avro server handler for the Netty transport 
+   * Avro server handler for the Netty transport
    */
   class NettyServerAvroHandler extends SimpleChannelUpstreamHandler {
 
     private NettyTransceiver connectionMetadata = new NettyTransceiver();
-    
+
     @Override
     public void handleUpstream(ChannelHandlerContext ctx, ChannelEvent e)
         throws Exception {
@@ -189,7 +189,7 @@
         // response will be null for oneway messages.
         if(res != null) {
           dataPack.setDatas(res);
-          e.getChannel().write(dataPack);          
+          e.getChannel().write(dataPack);
         }
       } catch (IOException ex) {
         LOG.warn("unexpect error");
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransceiver.java
index a8a2e3d..0d7dfb7 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransceiver.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransceiver.java
@@ -61,24 +61,24 @@
 public class NettyTransceiver extends Transceiver {
   /** If not specified, the default connection timeout will be used (60 sec). */
   public static final long DEFAULT_CONNECTION_TIMEOUT_MILLIS = 60 * 1000L;
-  public static final String NETTY_CONNECT_TIMEOUT_OPTION = 
+  public static final String NETTY_CONNECT_TIMEOUT_OPTION =
       "connectTimeoutMillis";
   public static final String NETTY_TCP_NODELAY_OPTION = "tcpNoDelay";
   public static final String NETTY_KEEPALIVE_OPTION = "keepAlive";
   public static final boolean DEFAULT_TCP_NODELAY_VALUE = true;
-  
+
   private static final Logger LOG = LoggerFactory.getLogger(NettyTransceiver.class
       .getName());
 
   private final AtomicInteger serialGenerator = new AtomicInteger(0);
-  private final Map<Integer, Callback<List<ByteBuffer>>> requests = 
+  private final Map<Integer, Callback<List<ByteBuffer>>> requests =
     new ConcurrentHashMap<Integer, Callback<List<ByteBuffer>>>();
-  
+
   private final ChannelFactory channelFactory;
   private final long connectTimeoutMillis;
   private final ClientBootstrap bootstrap;
   private final InetSocketAddress remoteAddr;
-  
+
   volatile ChannelFuture channelFuture;
   volatile boolean stopping;
   private final Object channelFutureLock = new Object();
@@ -101,7 +101,7 @@
 
   /**
    * Creates a NettyTransceiver, and attempts to connect to the given address.
-   * {@link #DEFAULT_CONNECTION_TIMEOUT_MILLIS} is used for the connection 
+   * {@link #DEFAULT_CONNECTION_TIMEOUT_MILLIS} is used for the connection
    * timeout.
    * @param addr the address to connect to.
    * @throws IOException if an error occurs connecting to the given address.
@@ -109,75 +109,75 @@
   public NettyTransceiver(InetSocketAddress addr) throws IOException {
     this(addr, DEFAULT_CONNECTION_TIMEOUT_MILLIS);
   }
-  
+
   /**
    * Creates a NettyTransceiver, and attempts to connect to the given address.
    * @param addr the address to connect to.
-   * @param connectTimeoutMillis maximum amount of time to wait for connection 
-   * establishment in milliseconds, or null to use 
+   * @param connectTimeoutMillis maximum amount of time to wait for connection
+   * establishment in milliseconds, or null to use
    * {@link #DEFAULT_CONNECTION_TIMEOUT_MILLIS}.
    * @throws IOException if an error occurs connecting to the given address.
    */
-  public NettyTransceiver(InetSocketAddress addr, 
+  public NettyTransceiver(InetSocketAddress addr,
       Long connectTimeoutMillis) throws IOException {
     this(addr, new NioClientSocketChannelFactory(
         Executors.newCachedThreadPool(new NettyTransceiverThreadFactory(
-            "Avro " + NettyTransceiver.class.getSimpleName() + " Boss")), 
+            "Avro " + NettyTransceiver.class.getSimpleName() + " Boss")),
         Executors.newCachedThreadPool(new NettyTransceiverThreadFactory(
-            "Avro " + NettyTransceiver.class.getSimpleName() + " I/O Worker"))), 
+            "Avro " + NettyTransceiver.class.getSimpleName() + " I/O Worker"))),
         connectTimeoutMillis);
   }
 
   /**
    * Creates a NettyTransceiver, and attempts to connect to the given address.
-   * {@link #DEFAULT_CONNECTION_TIMEOUT_MILLIS} is used for the connection 
+   * {@link #DEFAULT_CONNECTION_TIMEOUT_MILLIS} is used for the connection
    * timeout.
    * @param addr the address to connect to.
    * @param channelFactory the factory to use to create a new Netty Channel.
    * @throws IOException if an error occurs connecting to the given address.
    */
-  public NettyTransceiver(InetSocketAddress addr, ChannelFactory channelFactory) 
+  public NettyTransceiver(InetSocketAddress addr, ChannelFactory channelFactory)
     throws IOException {
     this(addr, channelFactory, buildDefaultBootstrapOptions(null));
   }
-  
+
   /**
    * Creates a NettyTransceiver, and attempts to connect to the given address.
    * @param addr the address to connect to.
    * @param channelFactory the factory to use to create a new Netty Channel.
-   * @param connectTimeoutMillis maximum amount of time to wait for connection 
-   * establishment in milliseconds, or null to use 
+   * @param connectTimeoutMillis maximum amount of time to wait for connection
+   * establishment in milliseconds, or null to use
    * {@link #DEFAULT_CONNECTION_TIMEOUT_MILLIS}.
    * @throws IOException if an error occurs connecting to the given address.
    */
-  public NettyTransceiver(InetSocketAddress addr, ChannelFactory channelFactory, 
+  public NettyTransceiver(InetSocketAddress addr, ChannelFactory channelFactory,
       Long connectTimeoutMillis) throws IOException {
-    this(addr, channelFactory, 
+    this(addr, channelFactory,
         buildDefaultBootstrapOptions(connectTimeoutMillis));
   }
-  
+
   /**
    * Creates a NettyTransceiver, and attempts to connect to the given address.
-   * It is strongly recommended that the {@link #NETTY_CONNECT_TIMEOUT_OPTION} 
-   * option be set to a reasonable timeout value (a Long value in milliseconds) 
-   * to prevent connect/disconnect attempts from hanging indefinitely.  It is 
-   * also recommended that the {@link #NETTY_TCP_NODELAY_OPTION} option be set 
+   * It is strongly recommended that the {@link #NETTY_CONNECT_TIMEOUT_OPTION}
+   * option be set to a reasonable timeout value (a Long value in milliseconds)
+   * to prevent connect/disconnect attempts from hanging indefinitely.  It is
+   * also recommended that the {@link #NETTY_TCP_NODELAY_OPTION} option be set
    * to true to minimize RPC latency.
    * @param addr the address to connect to.
    * @param channelFactory the factory to use to create a new Netty Channel.
-   * @param nettyClientBootstrapOptions map of Netty ClientBootstrap options 
+   * @param nettyClientBootstrapOptions map of Netty ClientBootstrap options
    * to use.
    * @throws IOException if an error occurs connecting to the given address.
    */
-  public NettyTransceiver(InetSocketAddress addr, ChannelFactory channelFactory, 
+  public NettyTransceiver(InetSocketAddress addr, ChannelFactory channelFactory,
       Map<String, Object> nettyClientBootstrapOptions) throws IOException {
     if (channelFactory == null) {
       throw new NullPointerException("channelFactory is null");
     }
-    
+
     // Set up.
     this.channelFactory = channelFactory;
-    this.connectTimeoutMillis = (Long) 
+    this.connectTimeoutMillis = (Long)
         nettyClientBootstrapOptions.get(NETTY_CONNECT_TIMEOUT_OPTION);
     bootstrap = new ClientBootstrap(channelFactory);
     remoteAddr = addr;
@@ -195,7 +195,7 @@
     });
 
     if (nettyClientBootstrapOptions != null) {
-      LOG.debug("Using Netty bootstrap options: " + 
+      LOG.debug("Using Netty bootstrap options: " +
           nettyClientBootstrapOptions);
       bootstrap.setOptions(nettyClientBootstrapOptions);
     }
@@ -220,19 +220,19 @@
       stateLock.readLock().unlock();
     }
   }
-  
+
   /**
-   * Creates a Netty ChannelUpstreamHandler for handling events on the 
+   * Creates a Netty ChannelUpstreamHandler for handling events on the
    * Netty client channel.
    * @return the ChannelUpstreamHandler to use.
    */
   protected ChannelUpstreamHandler createNettyClientAvroHandler() {
     return new NettyClientAvroHandler();
   }
-  
+
   /**
    * Creates the default options map for the Netty ClientBootstrap.
-   * @param connectTimeoutMillis connection timeout in milliseconds, or null 
+   * @param connectTimeoutMillis connection timeout in milliseconds, or null
    * if no timeout is desired.
    * @return the map of Netty bootstrap options.
    */
@@ -241,25 +241,25 @@
     Map<String, Object> options = new HashMap<String, Object>(3);
     options.put(NETTY_TCP_NODELAY_OPTION, DEFAULT_TCP_NODELAY_VALUE);
     options.put(NETTY_KEEPALIVE_OPTION, true);
-    options.put(NETTY_CONNECT_TIMEOUT_OPTION, 
-        connectTimeoutMillis == null ? DEFAULT_CONNECTION_TIMEOUT_MILLIS : 
+    options.put(NETTY_CONNECT_TIMEOUT_OPTION,
+        connectTimeoutMillis == null ? DEFAULT_CONNECTION_TIMEOUT_MILLIS :
           connectTimeoutMillis);
     return options;
   }
-  
+
   /**
    * Tests whether the given channel is ready for writing.
    * @return true if the channel is open and ready; false otherwise.
    */
   private static boolean isChannelReady(Channel channel) {
-    return (channel != null) && 
+    return (channel != null) &&
       channel.isOpen() && channel.isBound() && channel.isConnected();
   }
-  
+
   /**
-   * Gets the Netty channel.  If the channel is not connected, first attempts 
+   * Gets the Netty channel.  If the channel is not connected, first attempts
    * to connect.
-   * NOTE: The stateLock read lock *must* be acquired before calling this 
+   * NOTE: The stateLock read lock *must* be acquired before calling this
    * method.
    * @return the Netty channel
    * @throws IOException if an error occurs connecting the channel.
@@ -283,13 +283,13 @@
               channelFuture.await(connectTimeoutMillis);
             } catch (InterruptedException e) {
               Thread.currentThread().interrupt(); // Reset interrupt flag
-              throw new IOException("Interrupted while connecting to " + 
+              throw new IOException("Interrupted while connecting to " +
                   remoteAddr);
             }
 
             synchronized(channelFutureLock) {
           if (!channelFuture.isSuccess()) {
-            throw new IOException("Error connecting to " + remoteAddr, 
+            throw new IOException("Error connecting to " + remoteAddr,
                 channelFuture.getCause());
           }
           channel = channelFuture.getChannel();
@@ -305,20 +305,20 @@
     }
     return channel;
   }
-  
+
   /**
    * Closes the connection to the remote peer if connected.
    */
   private void disconnect() {
     disconnect(false, false, null);
   }
-  
+
   /**
    * Closes the connection to the remote peer if connected.
    * @param awaitCompletion if true, will block until the close has completed.
-   * @param cancelPendingRequests if true, will drain the requests map and 
+   * @param cancelPendingRequests if true, will drain the requests map and
    * send an IOException to all Callbacks.
-   * @param cause if non-null and cancelPendingRequests is true, this Throwable 
+   * @param cause if non-null and cancelPendingRequests is true, this Throwable
    * will be passed to all Callbacks.
    */
   private void disconnect(boolean awaitCompletion, boolean cancelPendingRequests,
@@ -337,7 +337,7 @@
     if (channelFutureToCancel != null) {
       channelFutureToCancel.cancel();
     }
-    
+
     if (stateReadLockHeld) {
       stateLock.readLock().unlock();
     }
@@ -354,9 +354,9 @@
         channel = null;
         remote = null;
         if (cancelPendingRequests) {
-          // Remove all pending requests (will be canceled after relinquishing 
+          // Remove all pending requests (will be canceled after relinquishing
           // write lock).
-          requestsToCancel = 
+          requestsToCancel =
             new ConcurrentHashMap<Integer, Callback<List<ByteBuffer>>>(requests);
           requests.clear();
         }
@@ -367,17 +367,17 @@
       }
       stateLock.writeLock().unlock();
     }
-    
+
     // Cancel any pending requests by sending errors to the callbacks:
     if ((requestsToCancel != null) && !requestsToCancel.isEmpty()) {
       LOG.debug("Removing " + requestsToCancel.size() + " pending request(s).");
       for (Callback<List<ByteBuffer>> request : requestsToCancel.values()) {
         request.handleError(
-            cause != null ? cause : 
+            cause != null ? cause :
               new IOException(getClass().getSimpleName() + " closed"));
       }
     }
-    
+
     // Close the channel:
     if (channelToClose != null) {
       ChannelFuture closeFuture = channelToClose.close();
@@ -391,35 +391,35 @@
       }
     }
   }
-  
+
   /**
    * Netty channels are thread-safe, so there is no need to acquire locks.
    * This method is a no-op.
    */
   @Override
   public void lockChannel() {
-    
+
   }
-  
+
   /**
    * Netty channels are thread-safe, so there is no need to acquire locks.
    * This method is a no-op.
    */
   @Override
   public void unlockChannel() {
-    
+
   }
 
   /**
    * Closes this transceiver and disconnects from the remote peer.
-   * Cancels all pending RPCs, sends an IOException to all pending callbacks, 
+   * Cancels all pending RPCs, sends an IOException to all pending callbacks,
    * and blocks until the close has completed.
    */
   @Override
   public void close() {
     close(true);
   }
-  
+
   /**
    * Closes this transceiver and disconnects from the remote peer.
    * Cancels all pending RPCs and sends an IOException to all pending callbacks.
@@ -450,7 +450,7 @@
    * Override as non-synchronized method because the method is thread safe.
    */
   @Override
-  public List<ByteBuffer> transceive(List<ByteBuffer> request) 
+  public List<ByteBuffer> transceive(List<ByteBuffer> request)
     throws IOException {
     try {
       CallFuture<List<ByteBuffer>> transceiverFuture = new CallFuture<List<ByteBuffer>>();
@@ -464,9 +464,9 @@
       return null;
     }
   }
-  
+
   @Override
-  public void transceive(List<ByteBuffer> request, 
+  public void transceive(List<ByteBuffer> request,
       Callback<List<ByteBuffer>> callback) throws IOException {
     stateLock.readLock().lock();
     try {
@@ -478,7 +478,7 @@
       stateLock.readLock().unlock();
     }
   }
-  
+
   @Override
   public void writeBuffers(List<ByteBuffer> buffers) throws IOException {
     ChannelFuture writeFuture;
@@ -489,7 +489,7 @@
     } finally {
       stateLock.readLock().unlock();
     }
-    
+
     if (!writeFuture.isDone()) {
       try {
         writeFuture.await();
@@ -502,10 +502,10 @@
       throw new IOException("Error writing buffers", writeFuture.getCause());
     }
   }
-  
+
   /**
    * Writes a NettyDataPack, reconnecting to the remote peer if necessary.
-   * NOTE: The stateLock read lock *must* be acquired before calling this 
+   * NOTE: The stateLock read lock *must* be acquired before calling this
    * method.
    * @param dataPack the data pack to write.
    * @return the Netty ChannelFuture for the write operation.
@@ -517,9 +517,9 @@
 
   @Override
   public List<ByteBuffer> readBuffers() throws IOException {
-    throw new UnsupportedOperationException();  
+    throw new UnsupportedOperationException();
   }
-  
+
   @Override
   public Protocol getRemote() {
     stateLock.readLock().lock();
@@ -549,23 +549,23 @@
       stateLock.writeLock().unlock();
     }
   }
-  
+
   /**
-   * A ChannelFutureListener for channel write operations that notifies 
+   * A ChannelFutureListener for channel write operations that notifies
    * a {@link Callback} if an error occurs while writing to the channel.
    */
   protected class WriteFutureListener implements ChannelFutureListener {
     protected final Callback<List<ByteBuffer>> callback;
-    
+
     /**
-     * Creates a WriteFutureListener that notifies the given callback 
+     * Creates a WriteFutureListener that notifies the given callback
      * if an error occurs writing data to the channel.
      * @param callback the callback to notify, or null to skip notification.
      */
     public WriteFutureListener(Callback<List<ByteBuffer>> callback) {
       this.callback = callback;
     }
-    
+
     @Override
     public void operationComplete(ChannelFuture future) throws Exception {
       if (!future.isSuccess() && (callback != null)) {
@@ -576,7 +576,7 @@
   }
 
   /**
-   * Avro client handler for the Netty transport 
+   * Avro client handler for the Netty transport
    */
   protected class NettyClientAvroHandler extends SimpleChannelUpstreamHandler {
 
@@ -618,7 +618,7 @@
 
     @Override
     public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
-      disconnect(false, true, e.getCause());      
+      disconnect(false, true, e.getCause());
     }
 
   }
@@ -629,18 +629,18 @@
   protected static class NettyTransceiverThreadFactory implements ThreadFactory {
     private final AtomicInteger threadId = new AtomicInteger(0);
     private final String prefix;
-    
+
     /**
-     * Creates a NettyTransceiverThreadFactory that creates threads with the 
+     * Creates a NettyTransceiverThreadFactory that creates threads with the
      * specified name.
-     * @param prefix the name prefix to use for all threads created by this 
-     * ThreadFactory.  A unique ID will be appended to this prefix to form the 
+     * @param prefix the name prefix to use for all threads created by this
+     * ThreadFactory.  A unique ID will be appended to this prefix to form the
      * final thread name.
      */
     public NettyTransceiverThreadFactory(String prefix) {
       this.prefix = prefix;
     }
-    
+
     @Override
     public Thread newThread(Runnable r) {
       Thread thread = new Thread(r);
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransportCodec.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransportCodec.java
index 1668c5e..9a96a37 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransportCodec.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransportCodec.java
@@ -31,23 +31,23 @@
 import org.jboss.netty.handler.codec.oneone.OneToOneEncoder;
 
 /**
- * Data structure, encoder and decoder classes for the Netty transport. 
+ * Data structure, encoder and decoder classes for the Netty transport.
  */
 public class NettyTransportCodec {
   /**
-   * Transport protocol data structure when using Netty. 
+   * Transport protocol data structure when using Netty.
    */
   public static class NettyDataPack {
     private int serial; // to track each call in client side
     private List<ByteBuffer> datas;
 
     public NettyDataPack() {}
-    
+
     public NettyDataPack(int serial, List<ByteBuffer> datas) {
       this.serial = serial;
       this.datas = datas;
     }
-    
+
     public void setSerial(int serial) {
       this.serial = serial;
     }
@@ -55,7 +55,7 @@
     public int getSerial() {
       return serial;
     }
-    
+
     public void setDatas(List<ByteBuffer> datas) {
       this.datas = datas;
     }
@@ -63,19 +63,19 @@
     public List<ByteBuffer> getDatas() {
       return datas;
     }
-    
+
   }
-  
+
   /**
-   * Protocol encoder which converts NettyDataPack which contains the 
-   * Responder's output List&lt;ByteBuffer&gt; to ChannelBuffer needed 
+   * Protocol encoder which converts NettyDataPack which contains the
+   * Responder's output List&lt;ByteBuffer&gt; to ChannelBuffer needed
    * by Netty.
    */
   public static class NettyFrameEncoder extends OneToOneEncoder {
 
     /**
      * encode msg to ChannelBuffer
-     * @param msg NettyDataPack from 
+     * @param msg NettyDataPack from
      *            NettyServerAvroHandler/NettyClientAvroHandler in the pipeline
      * @return encoded ChannelBuffer
      */
@@ -94,7 +94,7 @@
       return ChannelBuffers
           .wrappedBuffer(bbs.toArray(new ByteBuffer[bbs.size()]));
     }
-    
+
     private ByteBuffer getPackHeader(NettyDataPack dataPack) {
       ByteBuffer header = ByteBuffer.allocate(8);
       header.putInt(dataPack.getSerial());
@@ -112,8 +112,8 @@
   }
 
   /**
-   * Protocol decoder which converts Netty's ChannelBuffer to 
-   * NettyDataPack which contains a List&lt;ByteBuffer&gt; needed 
+   * Protocol decoder which converts Netty's ChannelBuffer to
+   * NettyDataPack which contains a List&lt;ByteBuffer&gt; needed
    * by Avro Responder.
    */
   public static class NettyFrameDecoder extends FrameDecoder {
@@ -127,7 +127,7 @@
     public NettyFrameDecoder() {
       maxMem = Runtime.getRuntime().maxMemory();
     }
-    
+
     /**
      * decode buffer to NettyDataPack
      */
@@ -148,9 +148,9 @@
           return null;
         }
       }
-      
+
     }
-    
+
     private boolean decodePackHeader(ChannelHandlerContext ctx, Channel channel,
         ChannelBuffer buffer) throws Exception {
       if (buffer.readableBytes()<8) {
@@ -173,7 +173,7 @@
 
       return true;
     }
-    
+
     private boolean decodePackBody(ChannelHandlerContext ctx, Channel channel,
         ChannelBuffer buffer) throws Exception {
       if (buffer.readableBytes() < 4) {
@@ -181,7 +181,7 @@
       }
 
       buffer.markReaderIndex();
-      
+
       int length = buffer.readInt();
 
       if (buffer.readableBytes() < length) {
@@ -193,10 +193,10 @@
       buffer.readBytes(bb);
       bb.flip();
       dataPack.getDatas().add(bb);
-      
+
       return dataPack.getDatas().size()==listSize;
     }
 
   }
-  
+
 }
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCContext.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCContext.java
index 99a88ac..9eeefc4 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCContext.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCContext.java
@@ -34,33 +34,33 @@
  *
  */
 public class RPCContext {
-  
+
   private HandshakeRequest handshakeRequest;
   private HandshakeResponse handshakeResponse;
 
   protected Map<String,ByteBuffer> requestCallMeta, responseCallMeta;
-  
+
   protected Object response;
   protected Exception error;
   private Message message;
   List<ByteBuffer> requestPayload;
   List<ByteBuffer> responsePayload;
-  
+
   /** Set the handshake request of this RPC. */
   public void setHandshakeRequest(HandshakeRequest handshakeRequest) {
     this.handshakeRequest = handshakeRequest;
   }
-  
+
   /** Get the handshake request of this RPC. */
   public HandshakeRequest getHandshakeRequest() {
     return this.handshakeRequest;
   }
-  
+
   /** Set the handshake response of this RPC. */
   public void setHandshakeResponse(HandshakeResponse handshakeResponse) {
     this.handshakeResponse = handshakeResponse;
   }
-  
+
   /** Get the handshake response of this RPC. */
   public HandshakeResponse getHandshakeResponse() {
     return this.handshakeResponse;
@@ -77,11 +77,11 @@
       handshakeRequest.meta = new HashMap<String,ByteBuffer>();
     return handshakeRequest.meta;
   }
-  
+
   void setRequestHandshakeMeta(Map<String,ByteBuffer> newmeta) {
     handshakeRequest.meta = newmeta;
   }
-  
+
   /**
    * This is an access method for the handshake state
    * provided by the server back to the client
@@ -93,11 +93,11 @@
       handshakeResponse.meta = new HashMap<String,ByteBuffer>();
     return handshakeResponse.meta;
   }
-  
+
   void setResponseHandshakeMeta(Map<String,ByteBuffer> newmeta) {
     handshakeResponse.meta = newmeta;
   }
-  
+
   /**
    * This is an access method for the per-call state
    * provided by the client to the server.
@@ -110,11 +110,11 @@
     }
     return requestCallMeta;
   }
-  
+
   void setRequestCallMeta(Map<String,ByteBuffer> newmeta) {
     requestCallMeta = newmeta;
   }
-  
+
   /**
    * This is an access method for the per-call state
    * provided by the server back to the client.
@@ -127,16 +127,16 @@
     }
     return responseCallMeta;
   }
-  
+
   void setResponseCallMeta(Map<String,ByteBuffer> newmeta) {
     responseCallMeta = newmeta;
   }
-  
+
   void setResponse(Object response) {
     this.response = response;
     this.error = null;
   }
-  
+
   /**
    * The response object generated at the server,
    * if it exists.  If an exception was generated,
@@ -147,12 +147,12 @@
   public Object response() {
     return response;
   }
-  
+
   void setError(Exception error) {
     this.response = null;
     this.error = error;
   }
-  
+
   /**
    * The exception generated at the server,
    * or null if no such exception has occured
@@ -162,7 +162,7 @@
   public Exception error() {
     return error;
   }
-  
+
   /**
    * Indicates whether an exception was generated
    * at the server
@@ -172,41 +172,41 @@
   public boolean isError() {
     return error != null;
   }
-  
+
   /** Sets the {@link Message} corresponding to this RPC */
   public void setMessage(Message message) {
-    this.message = message;    
+    this.message = message;
   }
-  
+
   /** Returns the {@link Message} corresponding to this RPC
-   * @return this RPC's {@link Message} 
+   * @return this RPC's {@link Message}
    */
   public Message getMessage() { return message; }
-  
+
   /** Sets the serialized payload of the request in this RPC. Will
    * not include handshake or meta-data. */
   public void setRequestPayload(List<ByteBuffer> payload) {
     this.requestPayload = payload;
   }
- 
+
   /** Returns the serialized payload of the request in this RPC. Will only be
-   * generated from a Requestor and will not include handshake or meta-data. 
+   * generated from a Requestor and will not include handshake or meta-data.
    * If the request payload has not been set yet, returns null.
-   * 
+   *
    * @return this RPC's request payload.*/
   public List<ByteBuffer> getRequestPayload() {
     return this.requestPayload;
   }
-  
+
   /** Returns the serialized payload of the response in this RPC. Will only be
-   * generated from a Responder and will not include handshake or meta-data. 
+   * generated from a Responder and will not include handshake or meta-data.
    * If the response payload has not been set yet, returns null.
-   * 
+   *
    * @return this RPC's response payload.*/
   public List<ByteBuffer> getResponsePayload() {
     return this.responsePayload;
   }
-  
+
   /** Sets the serialized payload of the response in this RPC. Will
    * not include handshake or meta-data. */
   public void setResponsePayload(List<ByteBuffer> payload) {
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCPlugin.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCPlugin.java
index 64e1231..0dba9af 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCPlugin.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCPlugin.java
@@ -24,20 +24,20 @@
  * and can be set or queried by subsequent instrumentation points.
  */
 public class RPCPlugin {
-  
+
   /**
    * Called on the client before the initial RPC handshake to
    * setup any handshake metadata for this plugin
    * @param context the handshake rpc context
    */
   public void clientStartConnect(RPCContext context) { }
-  
+
   /**
    * Called on the server during the RPC handshake
    * @param context the handshake rpc context
    */
   public void serverConnecting(RPCContext context) { }
-  
+
   /**
    * Called on the client after the initial RPC handshake
    * @param context the handshake rpc context
@@ -49,22 +49,22 @@
    * @param context the per-call rpc context (in/out parameter)
    */
   public void clientSendRequest(RPCContext context) { }
- 
-  
+
+
   /**
    * This method is invoked at the RPC server when the request is received,
    * but before the call itself is executed
    * @param context the per-call rpc context (in/out parameter)
    */
   public void serverReceiveRequest(RPCContext context) { }
-  
+
   /**
    * This method is invoked at the server before the response is executed,
    * but before the response has been formulated
    * @param context the per-call rpc context (in/out parameter)
    */
   public void serverSendResponse(RPCContext context) { }
-  
+
   /**
    * This method is invoked at the client after the call is executed,
    * and after the client receives the response
@@ -72,5 +72,5 @@
    */
   public void clientReceiveResponse(RPCContext context) { }
 
-  
+
 }
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Requestor.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Requestor.java
index 5379945..4dfeb7c 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Requestor.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Requestor.java
@@ -64,7 +64,7 @@
   private volatile boolean sendLocalText;
   private final Transceiver transceiver;
   private final ReentrantLock handshakeLock = new ReentrantLock();
-  
+
   protected final List<RPCPlugin> rpcMetaPlugins;
 
   public Protocol getLocal() { return local; }
@@ -77,7 +77,7 @@
     this.rpcMetaPlugins =
       new CopyOnWriteArrayList<RPCPlugin>();
   }
-  
+
   /**
    * Adds a new plugin to manipulate RPC metadata.  Plugins
    * are executed in the order that they are added.
@@ -88,7 +88,7 @@
   }
 
   private static final EncoderFactory ENCODER_FACTORY = new EncoderFactory();
-  
+
   /** Writes a request message and reads a response or error message. */
   public Object request(String messageName, Object request)
     throws Exception {
@@ -96,10 +96,10 @@
     Request rpcRequest = new Request(messageName, request, new RPCContext());
     CallFuture<Object> future = /* only need a Future for two-way messages */
       rpcRequest.getMessage().isOneWay() ? null : new CallFuture<Object>();
-    
+
     // Send request
     request(rpcRequest, future);
-    
+
     if (future == null)        // the message is one-way, so return immediately
       return null;
     try {                      // the message is two-way, wait for the result
@@ -112,7 +112,7 @@
       }
     }
   }
-  
+
   /**
    * Writes a request message and returns the result through a Callback.
    * Clients can also use a Future interface by creating a new CallFuture<T>,
@@ -120,15 +120,15 @@
    * @param <T> the return type of the message.
    * @param messageName the name of the message to invoke.
    * @param request the request data to send.
-   * @param callback the callback which will be invoked when the response is returned 
+   * @param callback the callback which will be invoked when the response is returned
    * or an error occurs.
    * @throws Exception if an error occurs sending the message.
    */
-  public <T> void request(String messageName, Object request, Callback<T> callback) 
+  public <T> void request(String messageName, Object request, Callback<T> callback)
     throws Exception {
     request(new Request(messageName, request, new RPCContext()), callback);
   }
-  
+
   /** Writes a request message and returns the result through a Callback. */
   <T> void request(Request request, Callback<T> callback)
     throws Exception {
@@ -166,7 +166,7 @@
         }
       }
     }
-    
+
     if (request.getMessage().isOneWay()) {
       t.lockChannel();
       try {
@@ -181,7 +181,7 @@
       t.transceive(request.getBytes(),
                    new TransceiverCallback<T>(request, callback));
     }
-    
+
   }
 
   private static final ConcurrentMap<String,MD5> REMOTE_HASHES =
@@ -212,14 +212,14 @@
     handshake.serverHash = remoteHash;
     if (sendLocalText)
       handshake.clientProtocol = local.toString();
-    
+
     RPCContext context = new RPCContext();
     context.setHandshakeRequest(handshake);
     for (RPCPlugin plugin : rpcMetaPlugins) {
       plugin.clientStartConnect(context);
     }
     handshake.meta = context.requestHandshakeMeta();
-    
+
     HANDSHAKE_WRITER.write(handshake, out);
   }
 
@@ -246,7 +246,7 @@
     default:
       throw new AvroRuntimeException("Unexpected match: "+handshake.match);
     }
-    
+
     RPCContext context = new RPCContext();
     context.setHandshakeResponse(handshake);
     for (RPCPlugin plugin : rpcMetaPlugins) {
@@ -315,14 +315,14 @@
   /** Reads an error message. */
   public abstract Exception readError(Schema writer, Schema reader, Decoder in)
     throws IOException;
-  
+
   /**
    * Handles callbacks from transceiver invocations.
    */
   protected class TransceiverCallback<T> implements Callback<List<ByteBuffer>> {
     private final Request request;
     private final Callback<T> callback;
-    
+
     /**
      * Creates a TransceiverCallback.
      * @param request the request to set.
@@ -332,7 +332,7 @@
       this.request = request;
       this.callback = callback;
     }
-    
+
     @Override
     @SuppressWarnings("unchecked")
     public void handleResult(List<ByteBuffer> responseBytes) {
@@ -350,7 +350,7 @@
       } catch (Exception e) {
         LOG.error("Error handling transceiver callback: " + e, e);
       }
-      
+
       // Read response; invoke callback
       Response response = new Response(request, in);
       Object responseObject;
@@ -370,13 +370,13 @@
         LOG.error("Error in callback handler: " + t, t);
       }
     }
-    
+
     @Override
     public void handleError(Throwable error) {
       callback.handleError(error);
     }
   }
-  
+
   /**
    * Encapsulates/generates a request.
    */
@@ -387,7 +387,7 @@
     private final BinaryEncoder encoder;
     private Message message;
     private List<ByteBuffer> requestBytes;
-    
+
     /**
      * Creates a Request.
      * @param messageName the name of the message to invoke.
@@ -397,7 +397,7 @@
     public Request(String messageName, Object request, RPCContext context) {
       this(messageName, request, context, null);
     }
-    
+
     /**
      * Creates a Request.
      * @param messageName the name of the message to invoke.
@@ -413,7 +413,7 @@
       this.encoder =
         ENCODER_FACTORY.binaryEncoder(new ByteBufferOutputStream(), encoder);
     }
-    
+
     /**
      * Copy constructor.
      * @param other Request from which to copy fields.
@@ -424,7 +424,7 @@
       this.context = other.context;
       this.encoder = other.encoder;
     }
-    
+
     /**
      * Gets the message name.
      * @return the message name.
@@ -432,7 +432,7 @@
     public String getMessageName() {
       return messageName;
     }
-    
+
     /**
      * Gets the RPC context.
      * @return the RPC context.
@@ -440,7 +440,7 @@
     public RPCContext getContext() {
       return context;
     }
-    
+
     /**
      * Gets the Message associated with this request.
      * @return this request's message.
@@ -454,13 +454,13 @@
       }
       return message;
     }
-    
+
     /**
      * Gets the request data, generating it first if necessary.
      * @return the request data.
      * @throws Exception if an error occurs generating the request data.
      */
-    public List<ByteBuffer> getBytes() 
+    public List<ByteBuffer> getBytes()
       throws Exception {
       if (requestBytes == null) {
         ByteBufferOutputStream bbo = new ByteBufferOutputStream();
@@ -493,14 +493,14 @@
       return requestBytes;
     }
   }
-  
+
   /**
    * Encapsulates/parses a response.
    */
   class Response {
     private final Request request;
     private final BinaryDecoder in;
-    
+
     /**
      * Creates a Response.
      * @param request the Request associated with this response.
@@ -508,7 +508,7 @@
     public Response(Request request) {
       this(request, null);
     }
-    
+
     /**
      * Creates a Creates a Response.
      * @param request the Request associated with this response.
@@ -518,13 +518,13 @@
       this.request = request;
       this.in = in;
     }
-    
+
     /**
      * Gets the RPC response, reading/deserializing it first if necessary.
      * @return the RPC response.
      * @throws Exception if an error occurs reading/deserializing the response.
      */
-    public Object getResponse() 
+    public Object getResponse()
       throws Exception {
       Message lm = request.getMessage();
       Message rm = remote.getMessages().get(request.getMessageName());
@@ -538,7 +538,7 @@
           ("Not both one-way messages: "+request.getMessageName());
 
       if (lm.isOneWay() && t.isConnected()) return null; // one-way w/ handshake
-      
+
       RPCContext context = request.getContext();
       context.setResponseCallMeta(META_READER.read(null, in));
 
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Responder.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Responder.java
index 6a1a3ff..322c3fb 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Responder.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Responder.java
@@ -80,10 +80,10 @@
   /** Return the remote protocol.  Accesses a {@link ThreadLocal} that's set
    * around calls to {@link #respond(Protocol.Message, Object)}. */
   public static Protocol getRemote() { return REMOTE.get(); }
-  
+
   /** Return the local protocol. */
   public Protocol getLocal() { return local; }
-  
+
   /**
    * Adds a new plugin to manipulate per-call metadata.  Plugins
    * are executed in the order that they are added.
@@ -98,7 +98,7 @@
   public List<ByteBuffer> respond(List<ByteBuffer> buffers) throws IOException {
     return respond(buffers, null);
   }
-  
+
   /** Called by a server to deserialize a request, compute and serialize a
    * response or error.  Transciever is used by connection-based servers to
    * track handshake status of connection. */
@@ -119,7 +119,7 @@
       if (remote == null)                        // handshake failed
         return bbo.getBufferList();
       handshake = bbo.getBufferList();
-      
+
       // read request using remote protocol specification
       context.setRequestCallMeta(META_READER.read(null, in));
       String messageName = in.readString(null).toString();
@@ -134,7 +134,7 @@
                                        +" in "+getLocal());
 
       Object request = readRequest(rm.getRequest(), m.getRequest(), in);
-      
+
       context.setMessage(rm);
       for (RPCPlugin plugin : rpcMetaPlugins) {
         plugin.serverReceiveRequest(context);
@@ -145,7 +145,7 @@
         throw new AvroRuntimeException("Not both one-way: "+messageName);
 
       Object response = null;
-      
+
       try {
         REMOTE.set(remote);
         response = respond(m, request);
@@ -157,7 +157,7 @@
       } finally {
         REMOTE.set(null);
       }
-      
+
       if (m.isOneWay() && wasConnected)           // no response data
         return null;
 
@@ -183,7 +183,7 @@
     }
     out.flush();
     payload = bbo.getBufferList();
-    
+
     // Grab meta-data from plugins
     context.setResponsePayload(payload);
     for (RPCPlugin plugin : rpcMetaPlugins) {
@@ -225,7 +225,7 @@
       response.serverProtocol = local.toString();
       response.serverHash = localHash;
     }
-    
+
     RPCContext context = new RPCContext();
     context.setHandshakeRequest(request);
     context.setHandshakeResponse(response);
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java
index 880c7a5..aba720c 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java
@@ -55,7 +55,7 @@
   private boolean saslResponsePiggybacked;
 
   private Protocol remote;
-  
+
   private ByteBuffer readHeader = ByteBuffer.allocate(4);
   private ByteBuffer writeHeader = ByteBuffer.allocate(4);
   private ByteBuffer zeroHeader = ByteBuffer.allocate(4).putInt(0);
@@ -128,7 +128,7 @@
       if (sasl.isComplete())
         saslResponsePiggybacked = true;
     }
-    
+
     while (!sasl.isComplete()) {
       Status status  = readStatus();
       ByteBuffer frame = readFrame();
@@ -140,7 +140,7 @@
           write(Status.FAIL, "Wrong mechanism: "+mechanism);
           throw new SaslException("Wrong mechanism: "+mechanism);
         }
-      case CONTINUE: 
+      case CONTINUE:
         byte[] response;
         try {
           response = sasl.evaluate(frame.array());
@@ -213,7 +213,7 @@
     read(buffer);
     return buffer;
   }
-  
+
   private void read(ByteBuffer buffer) throws IOException {
     buffer.clear();
     while (buffer.hasRemaining())
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Server.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Server.java
index 4ae6053..aa0efe3 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Server.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Server.java
@@ -28,8 +28,8 @@
 
   /** Stop this server. */
   void close();
-  
+
   /** Wait for this server to exit. */
   void join() throws InterruptedException;
-  
+
 }
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketServer.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketServer.java
index 8db5d66..580c16e 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketServer.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketServer.java
@@ -83,7 +83,7 @@
   }
 
   public void close() {
-    this.interrupt(); 
+    this.interrupt();
     group.interrupt();
   }
 
@@ -128,7 +128,7 @@
     }
 
   }
-  
+
   public static void main(String[] arg) throws Exception {
     Responder responder =
       new GenericResponder(Protocol.parse("{\"protocol\": \"X\"}")) {
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketTransceiver.java
index e2178c6..50bc5be 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketTransceiver.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketTransceiver.java
@@ -43,7 +43,7 @@
   private ByteBuffer header = ByteBuffer.allocate(4);
 
   private Protocol remote;
-  
+
   public SocketTransceiver(SocketAddress address) throws IOException {
     this(SocketChannel.open(address));
   }
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Transceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Transceiver.java
index 898fd77..820ba17 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Transceiver.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Transceiver.java
@@ -31,14 +31,14 @@
   private final ReentrantLock channelLock = new ReentrantLock();
 
   public abstract String getRemoteName() throws IOException;
-  
+
   /**
    * Acquires an exclusive lock on the transceiver's channel.
    */
   public void lockChannel() {
     channelLock.lock();
   }
-  
+
   /**
    * Releases the lock on the transceiver's channel if held by the calling thread.
    */
@@ -61,8 +61,8 @@
       unlockChannel();
     }
   }
-  
-  /** 
+
+  /**
    * Called by {@link Requestor#request(String,Object,Callback)} for two-way messages using callbacks.
    */
   public void transceive(List<ByteBuffer> request, Callback<List<ByteBuffer>> callback)
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/generic/GenericResponder.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/generic/GenericResponder.java
index c5beac0..1ed6bc7 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/generic/GenericResponder.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/generic/GenericResponder.java
@@ -38,7 +38,7 @@
 
   public GenericResponder(Protocol local) {
     this(local, GenericData.get());
-    
+
   }
 
   public GenericResponder(Protocol local, GenericData data) {
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectRequestor.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectRequestor.java
index 84d798e..29903ec 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectRequestor.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectRequestor.java
@@ -33,7 +33,7 @@
 
 /** A {@link org.apache.avro.ipc.Requestor} for existing interfaces. */
 public class ReflectRequestor extends SpecificRequestor {
-  
+
   public ReflectRequestor(Class<?> iface, Transceiver transceiver)
     throws IOException {
     this(iface, transceiver, new ReflectData(iface.getClassLoader()));
@@ -43,19 +43,19 @@
     throws IOException {
     this(protocol, transceiver, ReflectData.get());
   }
-    
+
   public ReflectRequestor(Class<?> iface, Transceiver transceiver,
                           ReflectData data)
     throws IOException {
     this(data.getProtocol(iface), transceiver, data);
   }
-    
+
   public ReflectRequestor(Protocol protocol, Transceiver transceiver,
                           ReflectData data)
     throws IOException {
     super(protocol, transceiver, data);
   }
-    
+
   public ReflectData getReflectData() { return (ReflectData)getSpecificData(); }
 
   @Override
@@ -69,7 +69,7 @@
   }
 
   /** Create a proxy instance whose methods invoke RPCs. */
-  public static <T> T getClient(Class<T> iface, Transceiver transciever) 
+  public static <T> T getClient(Class<T> iface, Transceiver transciever)
     throws IOException {
     return getClient(iface, transciever,
                      new ReflectData(iface.getClassLoader()));
@@ -81,16 +81,16 @@
                                 ReflectData reflectData) throws IOException {
     Protocol protocol = reflectData.getProtocol(iface);
     return (T)Proxy.newProxyInstance
-      (reflectData.getClassLoader(), 
+      (reflectData.getClassLoader(),
        new Class[] { iface },
        new ReflectRequestor(protocol, transciever, reflectData));
   }
-  
+
   /** Create a proxy instance whose methods invoke RPCs. */
   @SuppressWarnings("unchecked")
-  public static <T> T getClient(Class<T> iface, ReflectRequestor rreq) 
+  public static <T> T getClient(Class<T> iface, ReflectRequestor rreq)
     throws IOException {
-    return (T)Proxy.newProxyInstance(rreq.getReflectData().getClassLoader(), 
+    return (T)Proxy.newProxyInstance(rreq.getReflectData().getClassLoader(),
                                   new Class[] { iface }, rreq);
   }
 }
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectResponder.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectResponder.java
index 3e66943..b9d8eff 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectResponder.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectResponder.java
@@ -35,7 +35,7 @@
   public ReflectResponder(Class iface, Object impl) {
     this(iface, impl, new ReflectData(impl.getClass().getClassLoader()));
   }
-  
+
   public ReflectResponder(Protocol protocol, Object impl) {
     this(protocol, impl, new ReflectData(impl.getClass().getClassLoader()));
   }
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificRequestor.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificRequestor.java
index d464737..5bcddc3 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificRequestor.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificRequestor.java
@@ -124,12 +124,12 @@
             throw e;
           }
         }
-      
+
         // Next, check for RuntimeExceptions:
         if (e instanceof RuntimeException) {
           throw e;
         }
-      
+
         // Not an expected Exception, so wrap it in AvroRemoteException:
         throw new AvroRemoteException(e);
       }
@@ -157,7 +157,7 @@
     for (Schema.Field param : schema.getFields())
       getDatumWriter(param.schema()).write(args[i++], out);
   }
-    
+
   @Override
   public Object readResponse(Schema writer, Schema reader, Decoder in)
     throws IOException {
@@ -203,7 +203,7 @@
   /** Return the remote protocol for a proxy. */
   public static Protocol getRemote(Object proxy) throws IOException {
     return ((Requestor)Proxy.getInvocationHandler(proxy)).getRemote();
-    
+
   }
 
 }
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificResponder.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificResponder.java
index ae3a30d..5e84d14 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificResponder.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificResponder.java
@@ -42,7 +42,7 @@
   public SpecificResponder(Class iface, Object impl) {
     this(iface, impl, new SpecificData(impl.getClass().getClassLoader()));
   }
-    
+
   public SpecificResponder(Protocol protocol, Object impl) {
     this(protocol, impl, new SpecificData(impl.getClass().getClassLoader()));
   }
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Histogram.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Histogram.java
index 521e1c2..6fef833 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Histogram.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Histogram.java
@@ -39,8 +39,8 @@
   /**
    * How many recent additions we should track.
    */
-  public static final int MAX_HISTORY_SIZE = 20; 
-  
+  public static final int MAX_HISTORY_SIZE = 20;
+
   private Segmenter<B, T> segmenter;
   private int[] counts;
   protected int totalCount;
@@ -67,13 +67,13 @@
      * is consistent with the segment numbers.
      */
     Iterator<B> getBuckets();
-    
+
     /**
      * Returns a List of bucket boundaries. Useful for printing
      * segmenters.
      */
     List<String> getBoundaryLabels();
-    
+
     /**
      * Returns the bucket labels as an array;
      */
@@ -116,7 +116,7 @@
     private String rangeAsString(T a, T b) {
       return String.format("[%s,%s)", a, b == null ? "infinity" : b);
     }
-    
+
     @Override
     public ArrayList<String> getBoundaryLabels() {
       ArrayList<String> outArray = new ArrayList<String>(index.keySet().size());
@@ -125,7 +125,7 @@
       }
       return outArray;
     }
-    
+
     @Override
     public ArrayList<String> getBucketLabels() {
       ArrayList<String> outArray = new ArrayList<String>(index.keySet().size());
@@ -135,14 +135,14 @@
       }
       return outArray;
     }
-    
+
     @Override
     public Iterator<String> getBuckets() {
       return new Iterator<String>() {
         Iterator<T> it = index.keySet().iterator();
         T cur = it.next(); // there's always at least one element
         int pos = 0;
-        
+
         @Override
         public boolean hasNext() {
           return (pos < index.keySet().size());
@@ -190,14 +190,14 @@
   public int[] getHistogram() {
     return counts;
   }
-  
+
   /**
    * Returns the underlying segmenter used for this histogram.
    */
   public Segmenter<B, T> getSegmenter() {
     return this.segmenter;
   }
-  
+
   /**
    * Returns values recently added to this histogram. These are in reverse
    * order (most recent first).
@@ -210,7 +210,7 @@
   public int getCount() {
     return totalCount;
   }
-  
+
 
   public String toString() {
     StringBuilder sb = new StringBuilder();
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StaticServlet.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StaticServlet.java
index c079ec5..88a50f8 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StaticServlet.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StaticServlet.java
@@ -29,7 +29,7 @@
  */
 public class StaticServlet extends DefaultServlet {
   public Resource getResource(String pathInContext) {
-    // Take only last slice of the URL as a filename, so we can adjust path. 
+    // Take only last slice of the URL as a filename, so we can adjust path.
     // This also prevents mischief like '../../foo.css'
     String[] parts = pathInContext.split("/");
     String filename =  parts[parts.length - 1];
@@ -43,4 +43,4 @@
       return null;
     }
   }
-} 
+}
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsPlugin.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsPlugin.java
index 565f532..6301bbe 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsPlugin.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsPlugin.java
@@ -36,7 +36,7 @@
 /**
  * Collects count and latency statistics about RPC calls.  Keeps
  * data for every method. Can be added to a Requestor (client)
- * or Responder (server). 
+ * or Responder (server).
  *
  * This uses milliseconds as the standard unit of measure
  * throughout the class, stored in floats.
@@ -76,9 +76,9 @@
          2000,
          5000,
         10000,
-        50000, 
+        50000,
        100000)));
-  
+
   /** Per-method histograms.
    * Must be accessed while holding a lock. */
   Map<Message, FloatHistogram<?>> methodTimings =
@@ -86,10 +86,10 @@
 
   Map<Message, IntegerHistogram<?>> sendPayloads =
     new HashMap<Message, IntegerHistogram<?>>();
-  
+
   Map<Message, IntegerHistogram<?>> receivePayloads =
     new HashMap<Message, IntegerHistogram<?>>();
-  
+
   /** RPCs in flight. */
   ConcurrentMap<RPCContext, Stopwatch> activeRpcs =
     new ConcurrentHashMap<RPCContext, Stopwatch>();
@@ -97,12 +97,12 @@
 
   /** How long I've been alive */
   public Date startupTime = new Date();
-  
+
   private Segmenter<?, Float> floatSegmenter;
   private Segmenter<?, Integer> integerSegmenter;
 
   /** Construct a plugin with custom Ticks and Segmenter implementations. */
-  StatsPlugin(Ticks ticks, Segmenter<?, Float> floatSegmenter, 
+  StatsPlugin(Ticks ticks, Segmenter<?, Float> floatSegmenter,
       Segmenter<?, Integer> integerSegmenter) {
     this.floatSegmenter = floatSegmenter;
     this.integerSegmenter = integerSegmenter;
@@ -114,7 +114,7 @@
   public StatsPlugin() {
     this(Stopwatch.SYSTEM_TICKS, LATENCY_SEGMENTER, PAYLOAD_SEGMENTER);
   }
-  
+
   /**
    * Helper to get the size of an RPC payload.
    */
@@ -122,12 +122,12 @@
     if (payload == null) {
       return 0;
     }
-    
+
     int size = 0;
     for (ByteBuffer bb: payload) {
       size = size + bb.limit();
     }
-    
+
     return size;
   }
 
@@ -136,7 +136,7 @@
     Stopwatch t = new Stopwatch(ticks);
     t.start();
     this.activeRpcs.put(context, t);
-    
+
     synchronized(receivePayloads) {
       IntegerHistogram<?> h = receivePayloads.get(context.getMessage());
       if (h == null) {
@@ -146,13 +146,13 @@
       h.add(getPayloadSize(context.getRequestPayload()));
     }
   }
-  
+
   @Override
   public void serverSendResponse(RPCContext context) {
     Stopwatch t = this.activeRpcs.remove(context);
     t.stop();
     publish(context, t);
-    
+
     synchronized(sendPayloads) {
       IntegerHistogram<?> h = sendPayloads.get(context.getMessage());
       if (h == null) {
@@ -162,13 +162,13 @@
       h.add(getPayloadSize(context.getResponsePayload()));
     }
   }
-  
+
   @Override
   public void clientSendRequest(RPCContext context) {
     Stopwatch t = new Stopwatch(ticks);
     t.start();
     this.activeRpcs.put(context, t);
-    
+
     synchronized(sendPayloads) {
       IntegerHistogram<?> h = sendPayloads.get(context.getMessage());
       if (h == null) {
@@ -178,13 +178,13 @@
       h.add(getPayloadSize(context.getRequestPayload()));
     }
   }
-  
+
   @Override
   public void clientReceiveResponse(RPCContext context) {
     Stopwatch t = this.activeRpcs.remove(context);
     t.stop();
     publish(context, t);
-    
+
     synchronized(receivePayloads) {
       IntegerHistogram<?> h = receivePayloads.get(context.getMessage());
       if (h == null) {
@@ -194,7 +194,7 @@
       h.add(getPayloadSize(context.getRequestPayload()));
     }
   }
-  
+
   /** Adds timing to the histograms. */
   private void publish(RPCContext context, Stopwatch t) {
     Message message = context.getMessage();
@@ -218,7 +218,7 @@
   private IntegerHistogram<?> createNewIntegerHistogram() {
     return new IntegerHistogram(integerSegmenter);
   }
-  
+
   /** Converts nanoseconds to milliseconds. */
   static float nanosToMillis(long elapsedNanos) {
     return elapsedNanos / 1000000.0f;
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServer.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServer.java
index 3ae8ada..1b2e54b 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServer.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServer.java
@@ -21,32 +21,32 @@
 import org.mortbay.jetty.servlet.ServletHolder;
 
 /* This is a server that displays live information from a StatsPlugin.
- * 
+ *
  *  Typical usage is as follows:
- *    StatsPlugin plugin = new StatsPlugin(); 
+ *    StatsPlugin plugin = new StatsPlugin();
  *    requestor.addPlugin(plugin);
  *    StatsServer server = new StatsServer(plugin, 8080);
- *    
+ *
  *  */
 public class StatsServer {
   Server httpServer;
   StatsPlugin plugin;
-  
-  /* Start a stats server on the given port, 
+
+  /* Start a stats server on the given port,
    * responsible for the given plugin. */
   public StatsServer(StatsPlugin plugin, int port) throws Exception {
     this.httpServer = new Server(port);
     this.plugin = plugin;
-    
+
     Context staticContext = new Context(httpServer, "/static");
     staticContext.addServlet(new ServletHolder(new StaticServlet()), "/");
-    
+
     Context context = new Context(httpServer, "/");
     context.addServlet(new ServletHolder(new StatsServlet(plugin)), "/");
-    
+
     httpServer.start();
   }
-  
+
   /* Stops this server. */
   public void stop() throws Exception {
     this.httpServer.stop();
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServlet.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServlet.java
index 3af2ffd..075bff2 100644
--- a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServlet.java
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServlet.java
@@ -49,49 +49,49 @@
  * This class follows the same synchronization conventions
  * as StatsPlugin, to avoid requiring StatsPlugin to serve
  * a copy of the data.
- */ 
+ */
 public class StatsServlet extends HttpServlet {
   private final StatsPlugin statsPlugin;
   private VelocityEngine velocityEngine;
-  private static final SimpleDateFormat FORMATTER = 
+  private static final SimpleDateFormat FORMATTER =
     new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss");
 
   public StatsServlet(StatsPlugin statsPlugin) throws UnavailableException {
     this.statsPlugin = statsPlugin;
     this.velocityEngine = new VelocityEngine();
-    
+
     // These two properties tell Velocity to use its own classpath-based loader
     velocityEngine.addProperty("resource.loader", "class");
     velocityEngine.addProperty("class.resource.loader.class",
         "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
-    
+
     velocityEngine.setProperty("runtime.references.strict", true);
     String logChuteName = "org.apache.velocity.runtime.log.NullLogChute";
     velocityEngine.setProperty("runtime.log.logsystem.class", logChuteName);
   }
-  
+
   /* Helper class to store per-message data which is passed to templates.
-   * 
+   *
    * The template expects a list of charts, each of which is parameterized by
    * map key-value string attributes. */
   public class RenderableMessage { // Velocity brakes if not public
     public String name;
     public int numCalls;
     public ArrayList<HashMap<String, String>> charts;
-    
+
     public RenderableMessage(String name) {
       this.name = name;
       this.charts = new ArrayList<HashMap<String, String>>();
     }
-    
+
     public ArrayList<HashMap<String, String>> getCharts() {
       return this.charts;
     }
-    
+
     public String getname() {
       return this.name;
     }
-    
+
     public int getNumCalls() {
       return this.numCalls;
     }
@@ -99,9 +99,9 @@
 
   /* Surround each string in an array with
    * quotation marks and escape existing quotes.
-   * 
+   *
    * This is useful when we have an array of strings that we want to turn into
-   * a javascript array declaration. 
+   * a javascript array declaration.
    */
   protected static List<String> escapeStringArray(List<String> input) {
     for (int i = 0; i < input.size(); i++) {
@@ -109,16 +109,16 @@
     }
     return input;
   }
-  
+
   @Override
   protected void doGet(HttpServletRequest req, HttpServletResponse resp)
       throws ServletException, IOException {
     resp.setContentType("text/html");
     String url = req.getRequestURL().toString();
     String[] parts = url.split("//")[1].split("/");
-    
+
     try {
-      writeStats(resp.getWriter()); 
+      writeStats(resp.getWriter());
     }
     catch (Exception e) {
       e.printStackTrace();
@@ -127,34 +127,34 @@
 
   void writeStats(Writer w) throws IOException {
     VelocityContext context = new VelocityContext();
-    context.put("title", "Avro RPC Stats"); 
-    
+    context.put("title", "Avro RPC Stats");
+
     ArrayList<String> rpcs = new ArrayList<String>();  // in flight rpcs
-    
-    ArrayList<RenderableMessage> messages = 
+
+    ArrayList<RenderableMessage> messages =
       new ArrayList<RenderableMessage>();
-    
-    for (Entry<RPCContext, Stopwatch> rpc : 
+
+    for (Entry<RPCContext, Stopwatch> rpc :
          this.statsPlugin.activeRpcs.entrySet()) {
       rpcs.add(renderActiveRpc(rpc.getKey(), rpc.getValue()));
     }
-    
+
     // Get set of all seen messages
     Set<Message> keys = null;
     synchronized(this.statsPlugin.methodTimings) {
        keys = this.statsPlugin.methodTimings.keySet();
-    
+
       for (Message m: keys) {
         messages.add(renderMethod(m));
       }
     }
-    
+
     context.put("inFlightRpcs", rpcs);
     context.put("messages", messages);
-    
+
     context.put("currTime", FORMATTER.format(new Date()));
     context.put("startupTime", FORMATTER.format(statsPlugin.startupTime));
-    
+
     Template t;
     try {
       t = velocityEngine.getTemplate(
@@ -169,22 +169,22 @@
     t.merge(context, w);
   }
 
-  private String renderActiveRpc(RPCContext rpc, Stopwatch stopwatch) 
+  private String renderActiveRpc(RPCContext rpc, Stopwatch stopwatch)
       throws IOException {
     String out = new String();
-    out += rpc.getMessage().getName() + ": " + 
+    out += rpc.getMessage().getName() + ": " +
         formatMillis(StatsPlugin.nanosToMillis(stopwatch.elapsedNanos()));
     return out;
   }
 
-  
+
   private RenderableMessage renderMethod(Message message) {
     RenderableMessage out = new RenderableMessage(message.getName());
-    
+
     synchronized(this.statsPlugin.methodTimings) {
       FloatHistogram<?> hist = this.statsPlugin.methodTimings.get(message);
       out.numCalls = hist.getCount();
-      
+
       HashMap<String, String> latencyBar = new HashMap<String, String>();
       // Fill in chart attributes for velocity
       latencyBar.put("type", "bar");
@@ -193,22 +193,22 @@
       latencyBar.put("numCalls", Integer.toString(hist.getCount()));
       latencyBar.put("avg", Float.toString(hist.getMean()));
       latencyBar.put("stdDev", Float.toString(hist.getUnbiasedStdDev()));
-      latencyBar.put("labelStr", 
+      latencyBar.put("labelStr",
           Arrays.toString(hist.getSegmenter().getBoundaryLabels().toArray()));
       latencyBar.put("boundaryStr",
           Arrays.toString(escapeStringArray(hist.getSegmenter().
               getBucketLabels()).toArray()));
-      latencyBar.put("dataStr", Arrays.toString(hist.getHistogram())); 
+      latencyBar.put("dataStr", Arrays.toString(hist.getHistogram()));
       out.charts.add(latencyBar);
-      
+
       HashMap<String, String> latencyDot = new HashMap<String, String>();
       latencyDot.put("title", "Latency");
       latencyDot.put("type", "dot");
-      latencyDot.put("dataStr", 
+      latencyDot.put("dataStr",
           Arrays.toString(hist.getRecentAdditions().toArray()));
       out.charts.add(latencyDot);
     }
-    
+
     synchronized(this.statsPlugin.sendPayloads) {
       IntegerHistogram<?> hist = this.statsPlugin.sendPayloads.get(message);
       HashMap<String, String> latencyBar = new HashMap<String, String>();
@@ -219,22 +219,22 @@
       latencyBar.put("numCalls", Integer.toString(hist.getCount()));
       latencyBar.put("avg", Float.toString(hist.getMean()));
       latencyBar.put("stdDev", Float.toString(hist.getUnbiasedStdDev()));
-      latencyBar.put("labelStr", 
+      latencyBar.put("labelStr",
           Arrays.toString(hist.getSegmenter().getBoundaryLabels().toArray()));
       latencyBar.put("boundaryStr",
           Arrays.toString(escapeStringArray(hist.getSegmenter().
               getBucketLabels()).toArray()));
-      latencyBar.put("dataStr", Arrays.toString(hist.getHistogram())); 
+      latencyBar.put("dataStr", Arrays.toString(hist.getHistogram()));
       out.charts.add(latencyBar);
-      
+
       HashMap<String, String> latencyDot = new HashMap<String, String>();
       latencyDot.put("title", "Send Payload");
       latencyDot.put("type", "dot");
-      latencyDot.put("dataStr", 
+      latencyDot.put("dataStr",
           Arrays.toString(hist.getRecentAdditions().toArray()));
       out.charts.add(latencyDot);
     }
-    
+
     synchronized(this.statsPlugin.receivePayloads) {
       IntegerHistogram<?> hist = this.statsPlugin.receivePayloads.get(message);
       HashMap<String, String> latencyBar = new HashMap<String, String>();
@@ -245,25 +245,25 @@
       latencyBar.put("numCalls", Integer.toString(hist.getCount()));
       latencyBar.put("avg", Float.toString(hist.getMean()));
       latencyBar.put("stdDev", Float.toString(hist.getUnbiasedStdDev()));
-      latencyBar.put("labelStr", 
+      latencyBar.put("labelStr",
           Arrays.toString(hist.getSegmenter().getBoundaryLabels().toArray()));
       latencyBar.put("boundaryStr",
           Arrays.toString(escapeStringArray(hist.getSegmenter().
               getBucketLabels()).toArray()));
-      latencyBar.put("dataStr", Arrays.toString(hist.getHistogram())); 
+      latencyBar.put("dataStr", Arrays.toString(hist.getHistogram()));
       out.charts.add(latencyBar);
-      
+
       HashMap<String, String> latencyDot = new HashMap<String, String>();
       latencyDot.put("title", "Recv Payload");
       latencyDot.put("type", "dot");
-      latencyDot.put("dataStr", 
+      latencyDot.put("dataStr",
           Arrays.toString(hist.getRecentAdditions().toArray()));
       out.charts.add(latencyDot);
     }
-    
+
     return out;
   }
-  
+
   private CharSequence formatMillis(float millis) {
     return String.format("%.0fms", millis);
   }
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/DataFileInteropTest.java b/lang/java/ipc/src/test/java/org/apache/avro/DataFileInteropTest.java
index dd64bf5..60862e2 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/DataFileInteropTest.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/DataFileInteropTest.java
@@ -32,7 +32,7 @@
 
 public class DataFileInteropTest {
 
-  private static final File DATAFILE_DIR = 
+  private static final File DATAFILE_DIR =
     new File(System.getProperty("test.dir", "/tmp"));
 
   @BeforeClass
@@ -45,8 +45,8 @@
   public void testGeneratedGeneric() throws IOException {
     System.out.println("Reading with generic:");
     DatumReaderProvider<Object> provider = new DatumReaderProvider<Object>() {
-      @Override public DatumReader<Object> get() { 
-        return new GenericDatumReader<Object>(); 
+      @Override public DatumReader<Object> get() {
+        return new GenericDatumReader<Object>();
         }
       };
     readFiles(provider);
@@ -56,15 +56,15 @@
   public void testGeneratedSpecific() throws IOException {
     System.out.println("Reading with specific:");
     DatumReaderProvider<Interop> provider = new DatumReaderProvider<Interop>() {
-      @Override public DatumReader<Interop> get() { 
-        return new SpecificDatumReader<Interop>(); 
+      @Override public DatumReader<Interop> get() {
+        return new SpecificDatumReader<Interop>();
         }
       };
     readFiles(provider);
   }
 
   // Can't use same Interop.java as specific for reflect.
-  // This used to be the case because one used Utf8 and the other Sring, but 
+  // This used to be the case because one used Utf8 and the other Sring, but
   // we use CharSequence now.
   // The current incompatibility is now that one uses byte[] and the other ByteBuffer
 
@@ -78,8 +78,8 @@
 //   @Test
 //   public void testGeneratedReflect() throws IOException {
 //     DatumReaderProvider<Interop> provider = new DatumReaderProvider<Interop>() {
-//       @Override public DatumReader<Interop> get() { 
-//         return new ReflectDatumReader<Interop>(Interop.class); 
+//       @Override public DatumReader<Interop> get() {
+//         return new ReflectDatumReader<Interop>(Interop.class);
 //         }
 //       };
 //     readFiles(provider);
@@ -95,7 +95,7 @@
       }
     }
   }
-  
+
   interface DatumReaderProvider<T extends Object> {
     public DatumReader<T> get();
   }
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java b/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java
index 96e2e62..feefd80 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java
@@ -30,157 +30,157 @@
  * This plugin tests handshake and call state by passing a string as metadata,
  * slowly building it up at each instrumentation point, testing it as it goes.
  * Finally, after the call or handshake is complete, the constructed string is
- * tested. It also tests that RPC context data is appropriately filled in 
+ * tested. It also tests that RPC context data is appropriately filled in
  * along the way by Requestor and Responder classes.
  */
 public final class RPCMetaTestPlugin extends RPCPlugin {
-  
+
   protected final String key;
-  
+
   public RPCMetaTestPlugin(String keyname) {
     key = keyname;
   }
-  
+
   @Override
   public void clientStartConnect(RPCContext context) {
     ByteBuffer buf = ByteBuffer.wrap("ap".getBytes());
     context.requestHandshakeMeta().put(key, buf);
   }
-  
+
   @Override
   public void serverConnecting(RPCContext context) {
-    
+
     Assert.assertNotNull(context.requestHandshakeMeta());
     Assert.assertNotNull(context.responseHandshakeMeta());
     Assert.assertNull(context.getRequestPayload());
     Assert.assertNull(context.getResponsePayload());
-    
+
     if (!context.requestHandshakeMeta().containsKey(key)) return;
-    
+
     ByteBuffer buf = context.requestHandshakeMeta().get(key);
     Assert.assertNotNull(buf);
     Assert.assertNotNull(buf.array());
-    
+
     String partialstr = new String(buf.array());
     Assert.assertNotNull(partialstr);
     Assert.assertEquals("partial string mismatch", "ap", partialstr);
-    
+
     buf = ByteBuffer.wrap((partialstr + "ac").getBytes());
     Assert.assertTrue(buf.remaining() > 0);
     context.responseHandshakeMeta().put(key, buf);
   }
-  
+
   @Override
   public void clientFinishConnect(RPCContext context) {
     Map<String,ByteBuffer> handshakeMeta = context.responseHandshakeMeta();
-    
+
     Assert.assertNull(context.getRequestPayload());
     Assert.assertNull(context.getResponsePayload());
     Assert.assertNotNull(handshakeMeta);
-    
+
     if (!handshakeMeta.containsKey(key)) return;
-    
+
     ByteBuffer buf = handshakeMeta.get(key);
     Assert.assertNotNull(buf);
     Assert.assertNotNull(buf.array());
-    
+
     String partialstr = new String(buf.array());
     Assert.assertNotNull(partialstr);
     Assert.assertEquals("partial string mismatch", "apac", partialstr);
-    
+
     buf = ByteBuffer.wrap((partialstr + "he").getBytes());
     Assert.assertTrue(buf.remaining() > 0);
     handshakeMeta.put(key, buf);
-    
+
     checkRPCMetaMap(handshakeMeta);
   }
-  
+
   @Override
-  public void clientSendRequest(RPCContext context) { 
+  public void clientSendRequest(RPCContext context) {
     ByteBuffer buf = ByteBuffer.wrap("ap".getBytes());
     context.requestCallMeta().put(key, buf);
     Assert.assertNotNull(context.getMessage());
     Assert.assertNotNull(context.getRequestPayload());
     Assert.assertNull(context.getResponsePayload());
   }
-  
+
   @Override
   public void serverReceiveRequest(RPCContext context) {
     Map<String,ByteBuffer> meta = context.requestCallMeta();
-    
-    Assert.assertNotNull(meta);    
+
+    Assert.assertNotNull(meta);
     Assert.assertNotNull(context.getMessage());
     Assert.assertNull(context.getResponsePayload());
-    
+
     if (!meta.containsKey(key)) return;
-    
+
     ByteBuffer buf = meta.get(key);
     Assert.assertNotNull(buf);
     Assert.assertNotNull(buf.array());
-    
+
     String partialstr = new String(buf.array());
     Assert.assertNotNull(partialstr);
     Assert.assertEquals("partial string mismatch", "ap", partialstr);
-    
+
     buf = ByteBuffer.wrap((partialstr + "a").getBytes());
     Assert.assertTrue(buf.remaining() > 0);
     meta.put(key, buf);
   }
-  
+
   @Override
   public void serverSendResponse(RPCContext context) {
     Assert.assertNotNull(context.requestCallMeta());
     Assert.assertNotNull(context.responseCallMeta());
 
     Assert.assertNotNull(context.getResponsePayload());
-    
+
     if (!context.requestCallMeta().containsKey(key)) return;
-    
+
     ByteBuffer buf = context.requestCallMeta().get(key);
     Assert.assertNotNull(buf);
     Assert.assertNotNull(buf.array());
-    
+
     String partialstr = new String(buf.array());
     Assert.assertNotNull(partialstr);
     Assert.assertEquals("partial string mismatch", "apa", partialstr);
-    
+
     buf = ByteBuffer.wrap((partialstr + "c").getBytes());
     Assert.assertTrue(buf.remaining() > 0);
     context.responseCallMeta().put(key, buf);
   }
-  
+
   @Override
   public void clientReceiveResponse(RPCContext context) {
     Assert.assertNotNull(context.responseCallMeta());
     Assert.assertNotNull(context.getRequestPayload());
-    
+
     if (!context.responseCallMeta().containsKey(key)) return;
-    
+
     ByteBuffer buf = context.responseCallMeta().get(key);
     Assert.assertNotNull(buf);
     Assert.assertNotNull(buf.array());
-    
+
     String partialstr = new String(buf.array());
     Assert.assertNotNull(partialstr);
     Assert.assertEquals("partial string mismatch", "apac", partialstr);
-    
+
     buf = ByteBuffer.wrap((partialstr + "he").getBytes());
     Assert.assertTrue(buf.remaining() > 0);
     context.responseCallMeta().put(key, buf);
-    
+
     checkRPCMetaMap(context.responseCallMeta());
   }
-  
+
   protected void checkRPCMetaMap(Map<String,ByteBuffer> rpcMeta) {
     Assert.assertNotNull(rpcMeta);
     Assert.assertTrue("key not present in map", rpcMeta.containsKey(key));
-    
+
     ByteBuffer keybuf = rpcMeta.get(key);
     Assert.assertNotNull(keybuf);
     Assert.assertTrue("key BB had nothing remaining", keybuf.remaining() > 0);
-    
+
     String str = new String(keybuf.array());
     Assert.assertEquals("apache", str);
   }
-  
+
 }
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/RandomData.java b/lang/java/ipc/src/test/java/org/apache/avro/RandomData.java
index 49f8857..85da034 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/RandomData.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/RandomData.java
@@ -47,7 +47,7 @@
     this.seed = seed;
     this.count = count;
   }
-  
+
   public Iterator<Object> iterator() {
     return new Iterator<Object>() {
       private int n;
@@ -60,7 +60,7 @@
       public void remove() { throw new UnsupportedOperationException(); }
     };
   }
-  
+
   @SuppressWarnings(value="unchecked")
   private static Object generate(Schema schema, Random random, int d) {
     switch (schema.getType()) {
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestCompare.java b/lang/java/ipc/src/test/java/org/apache/avro/TestCompare.java
index 8f02022..20efe17 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestCompare.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestCompare.java
@@ -184,7 +184,7 @@
     s2.setKind(Kind.BAZ);
     check(schema, s1, s2, true, new SpecificDatumWriter<TestRecord>(schema),
           SpecificData.get());
-  }  
+  }
 
   private static <T> void check(String schemaJson, T o1, T o2)
     throws Exception {
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestDataFileSpecific.java b/lang/java/ipc/src/test/java/org/apache/avro/TestDataFileSpecific.java
index 9d85a20..9f73b86 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestDataFileSpecific.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestDataFileSpecific.java
@@ -68,7 +68,7 @@
     DataFileReader<Foo> reader =
       new DataFileReader<Foo>(FILE, new SpecificDatumReader<Foo>());
     int i = 0;
-    for (Foo f : reader) 
+    for (Foo f : reader)
       Assert.assertEquals(""+(i++), f.getLabel().toString());
     Assert.assertEquals(10, i);
     reader.close();
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolDatagram.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolDatagram.java
index 1f16acc..300ca37 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolDatagram.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolDatagram.java
@@ -35,7 +35,7 @@
         new InetSocketAddress("localhost",
             new Random().nextInt(10000)+10000));
   }
-  
+
   @Override
   public Transceiver createTransceiver() throws Exception{
     return new DatagramTransceiver(new InetSocketAddress("localhost", server.getPort()));
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGeneric.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGeneric.java
index 1309ea1..2e3dad2 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGeneric.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGeneric.java
@@ -90,7 +90,7 @@
         error.put("message", new Utf8("an error"));
         throw new AvroRemoteException(error);
       }
-      
+
       throw new AvroRuntimeException("unexpected message: "+message.getName());
     }
 
@@ -111,7 +111,7 @@
 
   @Test
   public void testHello() throws IOException {
-    GenericRecord params = 
+    GenericRecord params =
       new GenericData.Record(PROTOCOL.getMessages().get("hello").getRequest());
     params.put("greeting", new Utf8("bob"));
     Utf8 response = (Utf8)requestor.request("hello", params);
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGenericMeta.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGenericMeta.java
index 5bc46f6..ed24444 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGenericMeta.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGenericMeta.java
@@ -26,7 +26,7 @@
 import org.junit.Before;
 
 public class TestProtocolGenericMeta extends TestProtocolGeneric {
-  
+
   @Before @Override
   public void testStartServer() throws Exception {
     if (server != null) return;
@@ -35,7 +35,7 @@
     responder.addRPCPlugin(new RPCMetaTestPlugin("key2"));
     server = new SocketServer(responder, new InetSocketAddress(0));
     server.start();
-    
+
     client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
     requestor = new GenericRequestor(PROTOCOL, client);
     requestor.addRPCPlugin(new RPCMetaTestPlugin("key1"));
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttp.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttp.java
index dc460c5..c162c09 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttp.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttp.java
@@ -42,12 +42,12 @@
   public Server createServer(Responder testResponder) throws Exception {
     return new HttpServer(testResponder, 0);
   }
-  
+
   @Override
   public Transceiver createTransceiver() throws Exception{
     return new HttpTransceiver(new URL("http://127.0.0.1:"+server.getPort()+"/"));
   }
- 
+
   protected int getExpectedHandshakeCount() {
     return REPEATING;
   }
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttps.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttps.java
index 1c87955..f770dae 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttps.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttps.java
@@ -57,12 +57,12 @@
     connector.setNeedClientAuth(false);
     return new HttpServer(testResponder, connector);
   }
-  
+
   @Override
   public Transceiver createTransceiver() throws Exception{
     return new HttpTransceiver(new URL("https://localhost:"+server.getPort()+"/"));
   }
- 
+
   protected int getExpectedHandshakeCount() {
     return REPEATING;
   }
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolNetty.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolNetty.java
index ed73e78..07ba143 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolNetty.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolNetty.java
@@ -34,12 +34,12 @@
   public Server createServer(Responder testResponder) throws Exception {
     return new NettyServer(responder, new InetSocketAddress(0));
   }
-  
+
   @Override
   public Transceiver createTransceiver() throws Exception{
     return new NettyTransceiver(new InetSocketAddress(server.getPort()), 2000L);
   }
-  
+
   @Override
   protected int getExpectedHandshakeCount() {
     return REPEATING;
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflect.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflect.java
index 7567c6e..4c8b478 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflect.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflect.java
@@ -54,7 +54,7 @@
     byte[] echoBytes(byte[] data);
     void error() throws SimpleException;
   }
-  
+
   private static boolean throwUndeclaredError;
 
   public static class TestImpl implements Simple {
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflectMeta.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflectMeta.java
index c6394cd..91a54b7 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflectMeta.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflectMeta.java
@@ -36,7 +36,7 @@
     rresp.addRPCPlugin(new RPCMetaTestPlugin("key2"));
     server = new SocketServer(rresp, new InetSocketAddress(0));
     server.start();
-    
+
     client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
     ReflectRequestor requestor = new ReflectRequestor(Simple.class, client);
     requestor.addRPCPlugin(new RPCMetaTestPlugin("key1"));
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java
index f660f28..08b569a 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java
@@ -95,23 +95,23 @@
     responder = new SpecificResponder(Simple.class, new TestImpl());
     server = createServer(responder);
     server.start();
-    
+
     client = createTransceiver();
     SpecificRequestor req = new SpecificRequestor(Simple.class, client);
     addRpcPlugins(req);
     proxy = SpecificRequestor.getClient(Simple.class, (SpecificRequestor)req);
-    
+
     monitor = new HandshakeMonitor();
     responder.addRPCPlugin(monitor);
   }
-  
+
   public void addRpcPlugins(Requestor requestor){}
-  
+
   public Server createServer(Responder testResponder) throws Exception{
     return server = new SocketServer(testResponder,
-                              new InetSocketAddress(0));   
+                              new InetSocketAddress(0));
   }
-  
+
   public Transceiver createTransceiver() throws Exception{
     return new SocketTransceiver(new InetSocketAddress(server.getPort()));
   }
@@ -218,14 +218,14 @@
     try { Thread.sleep(100); } catch (InterruptedException e) {}
     assertEquals(2, ackCount);
   }
-  
+
   @Test
   public void testRepeatedAccess() throws Exception {
     for (int x = 0; x < 1000; x++) {
       proxy.hello("hi!");
     }
   }
-  
+
   @Test(expected = Exception.class)
   public void testConnectionRefusedOneWay() throws IOException {
     Transceiver client = new HttpTransceiver(new URL("http://localhost:4444"));
@@ -277,12 +277,12 @@
     server.close();
     server = null;
   }
-  
+
   public class HandshakeMonitor extends RPCPlugin{
-    
+
     private int handshakes;
     private HashSet<String> seenProtocols = new HashSet<String>();
-    
+
     @Override
     public void serverConnecting(RPCContext context) {
       handshakes++;
@@ -299,7 +299,7 @@
         seenProtocols.add(clientProtocol);
       }
     }
-    
+
     public void assertHandshake(){
       int expected = getExpectedHandshakeCount();
       if(expected != REPEATING){
@@ -307,7 +307,7 @@
       }
     }
   }
-  
+
   protected int getExpectedHandshakeCount() {
    return 3;
   }
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecificMeta.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecificMeta.java
index a2dd575..d38362e 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecificMeta.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecificMeta.java
@@ -28,19 +28,19 @@
 
 
 public class TestProtocolSpecificMeta extends TestProtocolSpecific {
-  
+
   @Override
   public Server createServer(Responder testResponder) throws Exception {
     responder.addRPCPlugin(new RPCMetaTestPlugin("key1"));
     responder.addRPCPlugin(new RPCMetaTestPlugin("key2"));
     return new SocketServer(responder, new InetSocketAddress(0));
   }
-  
+
   @Override
   public Transceiver createTransceiver() throws Exception {
     return new SocketTransceiver(new InetSocketAddress(server.getPort()));
   }
-  
+
   public void addRpcPlugins(Requestor req){
     req.addRPCPlugin(new RPCMetaTestPlugin("key1"));
     req.addRPCPlugin(new RPCMetaTestPlugin("key2"));
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestSchema.java b/lang/java/ipc/src/test/java/org/apache/avro/TestSchema.java
index b19b138..468a21b 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/TestSchema.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestSchema.java
@@ -172,16 +172,16 @@
     check("{\"type\":\"map\", \"values\":\"long\"}", "{\"a\":1}", map);
     checkParseError("{\"type\":\"map\"}");        // values required
   }
-  
+
   @Test
   public void testUnionMap() throws Exception {
     String unionMapSchema = "{\"name\":\"foo\", \"type\":\"record\"," +
-    		" \"fields\":[ {\"name\":\"mymap\", \"type\":" +
-    		"   [{\"type\":\"map\", \"values\":" +
-    		"      [\"int\",\"long\",\"float\",\"string\"]}," +
-    		"    \"null\"]" +
-    		"   }]" +
-    		" }";
+        " \"fields\":[ {\"name\":\"mymap\", \"type\":" +
+        "   [{\"type\":\"map\", \"values\":" +
+        "      [\"int\",\"long\",\"float\",\"string\"]}," +
+        "    \"null\"]" +
+        "   }]" +
+        " }";
     check(unionMapSchema, true);
   }
 
@@ -305,10 +305,10 @@
       // reasonable amount of time
       for (Schema s1 : recs) {
         Schema s2 = Schema.parse(s1.toString());
-        assertEquals(s1.hashCode(), s2.hashCode()); 
+        assertEquals(s1.hashCode(), s2.hashCode());
         assertEquals(s1, s2);
       }
-    }                 
+    }
   }
 
   @Test
@@ -370,15 +370,15 @@
     String namedTypes = ", {\"type\":\"record\",\"name\":\"Foo\",\"fields\":[]}," +
     " {\"type\":\"fixed\",\"name\":\"Bar\",\"size\": 1}," +
     " {\"type\":\"enum\",\"name\":\"Baz\",\"symbols\": [\"X\"]}";
-    
+
     String namedTypes2 = ", {\"type\":\"record\",\"name\":\"Foo2\",\"fields\":[]}," +
     " {\"type\":\"fixed\",\"name\":\"Bar2\",\"size\": 1}," +
     " {\"type\":\"enum\",\"name\":\"Baz2\",\"symbols\": [\"X\"]}";
-    
+
     check(partial + namedTypes + "]", false);
-    check(partial + namedTypes + namedTypes2 + "]", false); 
+    check(partial + namedTypes + namedTypes2 + "]", false);
     checkParseError(partial + namedTypes + namedTypes + "]");
-    
+
     // fail with two branches of the same unnamed type
     checkUnionError(new Schema[] {Schema.create(Type.INT), Schema.create(Type.INT)});
     checkUnionError(new Schema[] {Schema.create(Type.LONG), Schema.create(Type.LONG)});
@@ -387,14 +387,14 @@
     checkUnionError(new Schema[] {Schema.create(Type.BOOLEAN), Schema.create(Type.BOOLEAN)});
     checkUnionError(new Schema[] {Schema.create(Type.BYTES), Schema.create(Type.BYTES)});
     checkUnionError(new Schema[] {Schema.create(Type.STRING), Schema.create(Type.STRING)});
-    checkUnionError(new Schema[] {Schema.createArray(Schema.create(Type.INT)), 
+    checkUnionError(new Schema[] {Schema.createArray(Schema.create(Type.INT)),
         Schema.createArray(Schema.create(Type.INT))});
-    checkUnionError(new Schema[] {Schema.createMap(Schema.create(Type.INT)), 
+    checkUnionError(new Schema[] {Schema.createMap(Schema.create(Type.INT)),
         Schema.createMap(Schema.create(Type.INT))});
-    
+
     List<String> symbols = new ArrayList<String>();
     symbols.add("NOTHING");
-    
+
     // succeed with two branches of the same named type, if different names
     Schema u;
     u = buildUnion(new Schema[] {
@@ -408,12 +408,12 @@
         Schema.parse
         ("{\"type\":\"enum\",\"name\":\"y.A\",\"symbols\":[\"Y\"]}")});
     check(u.toString(), false);
-    
+
     u = buildUnion(new Schema[] {
         Schema.parse("{\"type\":\"fixed\",\"name\":\"x.A\",\"size\":4}"),
         Schema.parse("{\"type\":\"fixed\",\"name\":\"y.A\",\"size\":8}")});
     check(u.toString(), false);
-    
+
     // fail with two branches of the same named type, but same names
     checkUnionError(new Schema[] {Schema.createRecord("Foo", null, "org.test", false),
         Schema.createRecord("Foo", null, "org.test", false)});
@@ -421,19 +421,19 @@
         Schema.createEnum("Bar", null, "org.test", symbols)});
     checkUnionError(new Schema[] {Schema.createFixed("Baz", null, "org.test", 2),
         Schema.createFixed("Baz", null, "org.test", 1)});
-    
+
     Schema union = buildUnion(new Schema[] {Schema.create(Type.INT)});
     // fail if creating a union of a union
     checkUnionError(new Schema[] {union});
   }
-  
+
   @Test
   public void testComplexProp() throws Exception {
     String json = "{\"type\":\"null\", \"foo\": [0]}";
     Schema s = Schema.parse(json);
     assertEquals(null, s.getProp("foo"));
   }
-  
+
   @Test public void testPropOrdering() throws Exception {
     String json = "{\"type\":\"int\",\"z\":\"c\",\"yy\":\"b\",\"x\":\"a\"}";
     Schema s = Schema.parse(json);
@@ -600,7 +600,7 @@
     // check field doc is parsed correctly
     Schema schema = Schema.parse(schemaStr);
     assertEquals("test", schema.getField("f").doc());
-    
+
     // check print/read cycle preserves field doc
     schema = Schema.parse(schema.toString());
     assertEquals("test", schema.getField("f").doc());
@@ -669,7 +669,7 @@
         Schema induced = GenericData.get().induce(datum);
         assertEquals("Induced schema does not match.", schema, induced);
       }
-        
+
       assertTrue("Datum does not validate against schema "+datum,
                  GenericData.get().validate(schema, datum));
 
@@ -709,14 +709,14 @@
     assertEquals(s1, s2);
     assertFalse(s0.equals(s2));
   }
-  
+
   public static void checkBinary(Schema schema, Object datum,
                                  DatumWriter<Object> writer,
                                  DatumReader<Object> reader)
     throws IOException {
     checkBinary(schema, datum, writer, reader, null);
   }
-  
+
   public static Object checkBinary(Schema schema, Object datum,
                                  DatumWriter<Object> writer,
                                  DatumReader<Object> reader,
@@ -730,11 +730,11 @@
     byte[] data = out.toByteArray();
 
     reader.setSchema(schema);
-        
+
     Object decoded =
       reader.read(reuse, DecoderFactory.get().binaryDecoder(
           data, null));
-      
+
     assertEquals("Decoded data does not match.", datum, decoded);
     return decoded;
   }
@@ -814,7 +814,7 @@
     reader.setSchema(schema);
     Object decoded = reader.read(null, DecoderFactory.get()
         .jsonDecoder(schema, new ByteArrayInputStream(data)));
-      
+
     assertEquals("Decoded data does not match.", datum, decoded);
   }
 
@@ -909,12 +909,12 @@
   public void testRecordWithPrimitiveName() {
     Schema.parse("{\"type\":\"record\", \"name\":\"string\", \"fields\": []}");
   }
-  
+
   @Test(expected=AvroTypeException.class)
   public void testEnumWithPrimitiveName() {
     Schema.parse("{\"type\":\"enum\", \"name\":\"null\", \"symbols\": [\"A\"]}");
   }
-  
+
   private static Schema enumSchema() {
     return Schema.parse("{ \"type\": \"enum\", \"name\": \"e\", "
         + "\"symbols\": [\"a\", \"b\"]}");
@@ -926,7 +926,7 @@
     s.addProp("p1", "1");
     s.addProp("p1", "2");
   }
-  
+
   @Test(expected=AvroRuntimeException.class)
   public void testImmutability2() {
     Schema s = enumSchema();
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java b/lang/java/ipc/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java
index 34a3601..7710be8 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java
@@ -119,7 +119,7 @@
     assertTrue(protocol.contains("java.lang.CharSequence goto$(java.lang.CharSequence break$)"));
     assertTrue(protocol.contains("public interface default$"));
     assertTrue(protocol.contains("throws org.apache.avro.AvroRemoteException, finally$"));
-    
+
     assertCompilesWithJavaCompiler(c);
 
   }
@@ -144,7 +144,7 @@
     assertTrue(contents.contains("public java.lang.CharSequence package$;"));
     assertTrue(contents.contains("class volatile$ extends"));
     assertTrue(contents.contains("volatile$ short$;"));
-    
+
     assertCompilesWithJavaCompiler(c);
   }
 
@@ -159,7 +159,7 @@
     String contents = c.iterator().next().contents;
 
     assertTrue(contents.contains("new$"));
-    
+
     assertCompilesWithJavaCompiler(c);
   }
 
@@ -203,10 +203,10 @@
         assertTrue(o.contents.contains("/** Very Inner Enum */"));
       }
     }
- 
+
     assertEquals(3, count);
   }
-  
+
   @Test
   public void testProtocolWithDocs() throws IOException {
     Protocol protocol = TestProtocolParsing.getSimpleProtocol();
@@ -222,7 +222,7 @@
     }
     assertEquals("Missed generated protocol!", 1, count);
   }
-  
+
   @Test
   public void testNeedCompile() throws IOException, InterruptedException {
     String schema = "" +
@@ -233,8 +233,8 @@
     FileWriter fw = new FileWriter(inputFile);
     fw.write(schema);
     fw.close();
-    
-    File outputDir = new File(System.getProperty("test.dir", "target/test") + 
+
+    File outputDir = new File(System.getProperty("test.dir", "target/test") +
       System.getProperty("file.separator") + "test_need_compile");
     File outputFile = new File(outputDir, "Foo.java");
     outputFile.delete();
@@ -249,14 +249,14 @@
     Thread.sleep(1000);  //granularity of JVM doesn't seem to go below 1 sec
     SpecificCompiler.compileSchema(inputFile, outputDir);
     assertEquals(lastModified, outputFile.lastModified());
-    
+
     fw = new FileWriter(inputFile);
     fw.write(schema);
     fw.close();
     SpecificCompiler.compileSchema(inputFile, outputDir);
     assertTrue(lastModified != outputFile.lastModified());
   }
-  
+
   /**
    * Creates a record with the given name, error status, and fields.
    * @param name the name of the schema.
@@ -264,51 +264,51 @@
    * @param fields the field(s) to add to the schema.
    * @return the schema.
    */
-  private Schema createRecord(String name, 
+  private Schema createRecord(String name,
       boolean isError, Field... fields) {
     Schema record = Schema.createRecord(name, null, null, isError);
     record.setFields(Arrays.asList(fields));
     return record;
   }
-  
+
   @Test
   public void generateGetMethod() {
     Field height = new Field("height", Schema.create(Type.INT), null, null);
     Field Height = new Field("Height", Schema.create(Type.INT), null, null);
-    Field height_and_width = 
+    Field height_and_width =
         new Field("height_and_width", Schema.create(Type.STRING), null, null);
-    Field message = 
+    Field message =
         new Field("message", Schema.create(Type.STRING), null, null);
-    Field Message = 
+    Field Message =
         new Field("Message", Schema.create(Type.STRING), null, null);
-    Field cause = 
+    Field cause =
         new Field("cause", Schema.create(Type.STRING), null, null);
-    Field clasz = 
+    Field clasz =
         new Field("class", Schema.create(Type.STRING), null, null);
-    Field schema = 
+    Field schema =
         new Field("schema", Schema.create(Type.STRING), null, null);
-    Field Schema$ = 
+    Field Schema$ =
         new Field("Schema", Schema.create(Type.STRING), null, null);
-    
+
     assertEquals("getHeight", SpecificCompiler.generateGetMethod(
         createRecord("test", false, height), height));
-    
+
     assertEquals("getHeightAndWidth", SpecificCompiler.generateGetMethod(
         createRecord("test", false, height_and_width), height_and_width));
-  
+
     assertEquals("getMessage", SpecificCompiler.generateGetMethod(
         createRecord("test", false, message), message));
     message = new Field("message", Schema.create(Type.STRING), null, null);
     assertEquals("getMessage$", SpecificCompiler.generateGetMethod(
         createRecord("test", true, message), message));
- 
+
     assertEquals("getCause", SpecificCompiler.generateGetMethod(
         createRecord("test", false, cause), cause));
     cause = new Field("cause", Schema.create(Type.STRING), null, null);
     assertEquals("getCause$", SpecificCompiler.generateGetMethod(
         createRecord("test", true, cause), cause));
 
-    
+
     assertEquals("getClass$", SpecificCompiler.generateGetMethod(
         createRecord("test", false, clasz), clasz));
     clasz = new Field("class", Schema.create(Type.STRING), null, null);
@@ -325,42 +325,42 @@
     Height = new Field("Height", Schema.create(Type.INT), null, null);
     assertEquals("getHeight", SpecificCompiler.generateGetMethod(
         createRecord("test", false, Height), Height));
-    
+
     height = new Field("height", Schema.create(Type.INT), null, null);
     Height = new Field("Height", Schema.create(Type.INT), null, null);
     assertEquals("getHeight$0", SpecificCompiler.generateGetMethod(
         createRecord("test", false, height, Height), height));
-    
+
     height = new Field("height", Schema.create(Type.INT), null, null);
     Height = new Field("Height", Schema.create(Type.INT), null, null);
     assertEquals("getHeight$1", SpecificCompiler.generateGetMethod(
         createRecord("test", false, height, Height), Height));
-    
+
     message = new Field("message", Schema.create(Type.STRING), null, null);
     Message = new Field("Message", Schema.create(Type.STRING), null, null);
     assertEquals("getMessage$", SpecificCompiler.generateGetMethod(
         createRecord("test", true, Message), Message));
-    
+
     message = new Field("message", Schema.create(Type.STRING), null, null);
     Message = new Field("Message", Schema.create(Type.STRING), null, null);
     assertEquals("getMessage$0", SpecificCompiler.generateGetMethod(
         createRecord("test", true, message, Message), message));
-    
+
     message = new Field("message", Schema.create(Type.STRING), null, null);
     Message = new Field("Message", Schema.create(Type.STRING), null, null);
     assertEquals("getMessage$1", SpecificCompiler.generateGetMethod(
         createRecord("test", true, message, Message), Message));
-    
+
     schema = new Field("schema", Schema.create(Type.STRING), null, null);
     Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
     assertEquals("getSchema$", SpecificCompiler.generateGetMethod(
         createRecord("test", false, Schema$), Schema$));
-    
+
     schema = new Field("schema", Schema.create(Type.STRING), null, null);
     Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
     assertEquals("getSchema$0", SpecificCompiler.generateGetMethod(
         createRecord("test", false, schema, Schema$), schema));
-    
+
     schema = new Field("schema", Schema.create(Type.STRING), null, null);
     Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
     assertEquals("getSchema$1", SpecificCompiler.generateGetMethod(
@@ -371,40 +371,40 @@
   public void generateSetMethod() {
     Field height = new Field("height", Schema.create(Type.INT), null, null);
     Field Height = new Field("Height", Schema.create(Type.INT), null, null);
-    Field height_and_width = 
+    Field height_and_width =
         new Field("height_and_width", Schema.create(Type.STRING), null, null);
-    Field message = 
+    Field message =
         new Field("message", Schema.create(Type.STRING), null, null);
-    Field Message = 
+    Field Message =
         new Field("Message", Schema.create(Type.STRING), null, null);
-    Field cause = 
+    Field cause =
         new Field("cause", Schema.create(Type.STRING), null, null);
-    Field clasz = 
+    Field clasz =
         new Field("class", Schema.create(Type.STRING), null, null);
-    Field schema = 
+    Field schema =
         new Field("schema", Schema.create(Type.STRING), null, null);
-    Field Schema$ = 
+    Field Schema$ =
         new Field("Schema", Schema.create(Type.STRING), null, null);
-    
+
     assertEquals("setHeight", SpecificCompiler.generateSetMethod(
         createRecord("test", false, height), height));
-    
+
     assertEquals("setHeightAndWidth", SpecificCompiler.generateSetMethod(
         createRecord("test", false, height_and_width), height_and_width));
-  
+
     assertEquals("setMessage", SpecificCompiler.generateSetMethod(
         createRecord("test", false, message), message));
     message = new Field("message", Schema.create(Type.STRING), null, null);
     assertEquals("setMessage$", SpecificCompiler.generateSetMethod(
         createRecord("test", true, message), message));
- 
+
     assertEquals("setCause", SpecificCompiler.generateSetMethod(
         createRecord("test", false, cause), cause));
     cause = new Field("cause", Schema.create(Type.STRING), null, null);
     assertEquals("setCause$", SpecificCompiler.generateSetMethod(
         createRecord("test", true, cause), cause));
 
-    
+
     assertEquals("setClass$", SpecificCompiler.generateSetMethod(
         createRecord("test", false, clasz), clasz));
     clasz = new Field("class", Schema.create(Type.STRING), null, null);
@@ -421,86 +421,86 @@
     Height = new Field("Height", Schema.create(Type.INT), null, null);
     assertEquals("setHeight", SpecificCompiler.generateSetMethod(
         createRecord("test", false, Height), Height));
-    
+
     height = new Field("height", Schema.create(Type.INT), null, null);
     Height = new Field("Height", Schema.create(Type.INT), null, null);
     assertEquals("setHeight$0", SpecificCompiler.generateSetMethod(
         createRecord("test", false, height, Height), height));
-    
+
     height = new Field("height", Schema.create(Type.INT), null, null);
     Height = new Field("Height", Schema.create(Type.INT), null, null);
     assertEquals("setHeight$1", SpecificCompiler.generateSetMethod(
         createRecord("test", false, height, Height), Height));
-    
+
     message = new Field("message", Schema.create(Type.STRING), null, null);
     Message = new Field("Message", Schema.create(Type.STRING), null, null);
     assertEquals("setMessage$", SpecificCompiler.generateSetMethod(
         createRecord("test", true, Message), Message));
-    
+
     message = new Field("message", Schema.create(Type.STRING), null, null);
     Message = new Field("Message", Schema.create(Type.STRING), null, null);
     assertEquals("setMessage$0", SpecificCompiler.generateSetMethod(
         createRecord("test", true, message, Message), message));
-    
+
     message = new Field("message", Schema.create(Type.STRING), null, null);
     Message = new Field("Message", Schema.create(Type.STRING), null, null);
     assertEquals("setMessage$1", SpecificCompiler.generateSetMethod(
         createRecord("test", true, message, Message), Message));
-    
+
     schema = new Field("schema", Schema.create(Type.STRING), null, null);
     Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
     assertEquals("setSchema$", SpecificCompiler.generateSetMethod(
         createRecord("test", false, Schema$), Schema$));
-    
+
     schema = new Field("schema", Schema.create(Type.STRING), null, null);
     Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
     assertEquals("setSchema$0", SpecificCompiler.generateSetMethod(
         createRecord("test", false, schema, Schema$), schema));
-    
+
     schema = new Field("schema", Schema.create(Type.STRING), null, null);
     Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
     assertEquals("setSchema$1", SpecificCompiler.generateSetMethod(
         createRecord("test", false, schema, Schema$), Schema$));
   }
-  
+
   @Test
   public void generateHasMethod() {
     Field height = new Field("height", Schema.create(Type.INT), null, null);
     Field Height = new Field("Height", Schema.create(Type.INT), null, null);
-    Field height_and_width = 
+    Field height_and_width =
         new Field("height_and_width", Schema.create(Type.STRING), null, null);
-    Field message = 
+    Field message =
         new Field("message", Schema.create(Type.STRING), null, null);
-    Field Message = 
+    Field Message =
         new Field("Message", Schema.create(Type.STRING), null, null);
-    Field cause = 
+    Field cause =
         new Field("cause", Schema.create(Type.STRING), null, null);
-    Field clasz = 
+    Field clasz =
         new Field("class", Schema.create(Type.STRING), null, null);
-    Field schema = 
+    Field schema =
         new Field("schema", Schema.create(Type.STRING), null, null);
-    Field Schema$ = 
+    Field Schema$ =
         new Field("Schema", Schema.create(Type.STRING), null, null);
-    
+
     assertEquals("hasHeight", SpecificCompiler.generateHasMethod(
         createRecord("test", false, height), height));
-    
+
     assertEquals("hasHeightAndWidth", SpecificCompiler.generateHasMethod(
         createRecord("test", false, height_and_width), height_and_width));
-  
+
     assertEquals("hasMessage", SpecificCompiler.generateHasMethod(
         createRecord("test", false, message), message));
     message = new Field("message", Schema.create(Type.STRING), null, null);
     assertEquals("hasMessage$", SpecificCompiler.generateHasMethod(
         createRecord("test", true, message), message));
- 
+
     assertEquals("hasCause", SpecificCompiler.generateHasMethod(
         createRecord("test", false, cause), cause));
     cause = new Field("cause", Schema.create(Type.STRING), null, null);
     assertEquals("hasCause$", SpecificCompiler.generateHasMethod(
         createRecord("test", true, cause), cause));
 
-    
+
     assertEquals("hasClass$", SpecificCompiler.generateHasMethod(
         createRecord("test", false, clasz), clasz));
     clasz = new Field("class", Schema.create(Type.STRING), null, null);
@@ -517,86 +517,86 @@
     Height = new Field("Height", Schema.create(Type.INT), null, null);
     assertEquals("hasHeight", SpecificCompiler.generateHasMethod(
         createRecord("test", false, Height), Height));
-    
+
     height = new Field("height", Schema.create(Type.INT), null, null);
     Height = new Field("Height", Schema.create(Type.INT), null, null);
     assertEquals("hasHeight$0", SpecificCompiler.generateHasMethod(
         createRecord("test", false, height, Height), height));
-    
+
     height = new Field("height", Schema.create(Type.INT), null, null);
     Height = new Field("Height", Schema.create(Type.INT), null, null);
     assertEquals("hasHeight$1", SpecificCompiler.generateHasMethod(
         createRecord("test", false, height, Height), Height));
-    
+
     message = new Field("message", Schema.create(Type.STRING), null, null);
     Message = new Field("Message", Schema.create(Type.STRING), null, null);
     assertEquals("hasMessage$", SpecificCompiler.generateHasMethod(
         createRecord("test", true, Message), Message));
-    
+
     message = new Field("message", Schema.create(Type.STRING), null, null);
     Message = new Field("Message", Schema.create(Type.STRING), null, null);
     assertEquals("hasMessage$0", SpecificCompiler.generateHasMethod(
         createRecord("test", true, message, Message), message));
-    
+
     message = new Field("message", Schema.create(Type.STRING), null, null);
     Message = new Field("Message", Schema.create(Type.STRING), null, null);
     assertEquals("hasMessage$1", SpecificCompiler.generateHasMethod(
         createRecord("test", true, message, Message), Message));
-    
+
     schema = new Field("schema", Schema.create(Type.STRING), null, null);
     Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
     assertEquals("hasSchema$", SpecificCompiler.generateHasMethod(
         createRecord("test", false, Schema$), Schema$));
-    
+
     schema = new Field("schema", Schema.create(Type.STRING), null, null);
     Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
     assertEquals("hasSchema$0", SpecificCompiler.generateHasMethod(
         createRecord("test", false, schema, Schema$), schema));
-    
+
     schema = new Field("schema", Schema.create(Type.STRING), null, null);
     Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
     assertEquals("hasSchema$1", SpecificCompiler.generateHasMethod(
         createRecord("test", false, schema, Schema$), Schema$));
   }
-  
+
   @Test
   public void generateClearMethod() {
     Field height = new Field("height", Schema.create(Type.INT), null, null);
     Field Height = new Field("Height", Schema.create(Type.INT), null, null);
-    Field height_and_width = 
+    Field height_and_width =
         new Field("height_and_width", Schema.create(Type.STRING), null, null);
-    Field message = 
+    Field message =
         new Field("message", Schema.create(Type.STRING), null, null);
-    Field Message = 
+    Field Message =
         new Field("Message", Schema.create(Type.STRING), null, null);
-    Field cause = 
+    Field cause =
         new Field("cause", Schema.create(Type.STRING), null, null);
-    Field clasz = 
+    Field clasz =
         new Field("class", Schema.create(Type.STRING), null, null);
-    Field schema = 
+    Field schema =
         new Field("schema", Schema.create(Type.STRING), null, null);
-    Field Schema$ = 
+    Field Schema$ =
         new Field("Schema", Schema.create(Type.STRING), null, null);
-    
+
     assertEquals("clearHeight", SpecificCompiler.generateClearMethod(
         createRecord("test", false, height), height));
-    
+
     assertEquals("clearHeightAndWidth", SpecificCompiler.generateClearMethod(
         createRecord("test", false, height_and_width), height_and_width));
-  
+
     assertEquals("clearMessage", SpecificCompiler.generateClearMethod(
         createRecord("test", false, message), message));
     message = new Field("message", Schema.create(Type.STRING), null, null);
     assertEquals("clearMessage$", SpecificCompiler.generateClearMethod(
         createRecord("test", true, message), message));
- 
+
     assertEquals("clearCause", SpecificCompiler.generateClearMethod(
         createRecord("test", false, cause), cause));
     cause = new Field("cause", Schema.create(Type.STRING), null, null);
     assertEquals("clearCause$", SpecificCompiler.generateClearMethod(
         createRecord("test", true, cause), cause));
 
-    
+
     assertEquals("clearClass$", SpecificCompiler.generateClearMethod(
         createRecord("test", false, clasz), clasz));
     clasz = new Field("class", Schema.create(Type.STRING), null, null);
@@ -613,42 +613,42 @@
     Height = new Field("Height", Schema.create(Type.INT), null, null);
     assertEquals("clearHeight", SpecificCompiler.generateClearMethod(
         createRecord("test", false, Height), Height));
-    
+
     height = new Field("height", Schema.create(Type.INT), null, null);
     Height = new Field("Height", Schema.create(Type.INT), null, null);
     assertEquals("clearHeight$0", SpecificCompiler.generateClearMethod(
         createRecord("test", false, height, Height), height));
-    
+
     height = new Field("height", Schema.create(Type.INT), null, null);
     Height = new Field("Height", Schema.create(Type.INT), null, null);
     assertEquals("clearHeight$1", SpecificCompiler.generateClearMethod(
         createRecord("test", false, height, Height), Height));
-    
+
     message = new Field("message", Schema.create(Type.STRING), null, null);
     Message = new Field("Message", Schema.create(Type.STRING), null, null);
     assertEquals("clearMessage$", SpecificCompiler.generateClearMethod(
         createRecord("test", true, Message), Message));
-    
+
     message = new Field("message", Schema.create(Type.STRING), null, null);
     Message = new Field("Message", Schema.create(Type.STRING), null, null);
     assertEquals("clearMessage$0", SpecificCompiler.generateClearMethod(
         createRecord("test", true, message, Message), message));
-    
+
     message = new Field("message", Schema.create(Type.STRING), null, null);
     Message = new Field("Message", Schema.create(Type.STRING), null, null);
     assertEquals("clearMessage$1", SpecificCompiler.generateClearMethod(
         createRecord("test", true, message, Message), Message));
-    
+
     schema = new Field("schema", Schema.create(Type.STRING), null, null);
     Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
     assertEquals("clearSchema$", SpecificCompiler.generateClearMethod(
         createRecord("test", false, Schema$), Schema$));
-    
+
     schema = new Field("schema", Schema.create(Type.STRING), null, null);
     Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
     assertEquals("clearSchema$0", SpecificCompiler.generateClearMethod(
         createRecord("test", false, schema, Schema$), schema));
-    
+
     schema = new Field("schema", Schema.create(Type.STRING), null, null);
     Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
     assertEquals("clearSchema$1", SpecificCompiler.generateClearMethod(
@@ -694,7 +694,7 @@
    * that the generated code is valid.
    */
   public static void
-      assertCompiles(Schema schema, boolean useJavaCompiler) 
+      assertCompiles(Schema schema, boolean useJavaCompiler)
   throws IOException {
     Collection<OutputFile> outputs = new SpecificCompiler(schema).compile();
     assertTrue(null != outputs);
@@ -702,7 +702,7 @@
       assertCompilesWithJavaCompiler(outputs);
     }
   }
-  
+
   /**
    * Checks that a protocol passes through the SpecificCompiler,
    * and, optionally, uses the system's Java compiler to check
@@ -716,9 +716,9 @@
       assertCompilesWithJavaCompiler(outputs);
     }
   }
-  
+
   /** Uses the system's java compiler to actually compile the generated code. */
-  static void assertCompilesWithJavaCompiler(Collection<OutputFile> outputs) 
+  static void assertCompilesWithJavaCompiler(Collection<OutputFile> outputs)
   throws IOException {
     if (outputs.isEmpty()) {
       return;               // Nothing to compile!
@@ -730,10 +730,10 @@
     }
 
     JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
-    StandardJavaFileManager fileManager = 
+    StandardJavaFileManager fileManager =
       compiler.getStandardFileManager(null, null, null);
-    
-    CompilationTask cTask = compiler.getTask(null, fileManager, null, null, 
+
+    CompilationTask cTask = compiler.getTask(null, fileManager, null, null,
         null,
         fileManager.getJavaFileObjects(
             javaFiles.toArray(new File[javaFiles.size()])));
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/generic/TestDeepCopy.java b/lang/java/ipc/src/test/java/org/apache/avro/generic/TestDeepCopy.java
index 0cf36f5..3829391 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/generic/TestDeepCopy.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/generic/TestDeepCopy.java
@@ -49,37 +49,37 @@
     interopBuilder.setBytesField(ByteBuffer.wrap(new byte[] { 1, 2, 3, 4 }));
     interopBuilder.setDoubleField(3.14d);
     interopBuilder.setEnumField(Kind.B);
-    interopBuilder.setFixedField(new MD5(new byte[] { 
+    interopBuilder.setFixedField(new MD5(new byte[] {
         4, 3, 2, 1, 4, 3, 2, 1, 4, 3, 2, 1, 4, 3, 2, 1 }));
     interopBuilder.setFloatField(6.022f);
     interopBuilder.setIntField(32);
     interopBuilder.setLongField(64L);
-    
-    Map<java.lang.String,org.apache.avro.Foo> map = 
+
+    Map<java.lang.String,org.apache.avro.Foo> map =
       new HashMap<java.lang.String,org.apache.avro.Foo>(1);
     map.put("foo", Foo.newBuilder().setLabel("bar").build());
     interopBuilder.setMapField(map);
-    
+
     interopBuilder.setNullField(null);
-    
+
     Node.Builder rootBuilder = Node.newBuilder().setLabel("/");
     Node.Builder homeBuilder = Node.newBuilder().setLabel("home");
     homeBuilder.setChildren(new ArrayList<Node>(0));
     rootBuilder.setChildren(Arrays.asList(new Node[] { homeBuilder.build() }));
     interopBuilder.setRecordField(rootBuilder.build());
-    
+
     interopBuilder.setStringField("Hello");
     interopBuilder.setUnionField(Arrays.asList(new ByteBuffer[] {
         ByteBuffer.wrap(new byte[] { 1, 2 }) }));
-    
+
     Interop interop = interopBuilder.build();
-    
+
     // Verify that deepCopy works for all fields:
     for (Field field : Interop.SCHEMA$.getFields()) {
       // Original field and deep copy should be equivalent:
       if (interop.get(field.pos()) instanceof ByteBuffer) {
         assertTrue(Arrays.equals(((ByteBuffer)interop.get(field.pos())).array(),
-            ((ByteBuffer)GenericData.get().deepCopy(field.schema(), 
+            ((ByteBuffer)GenericData.get().deepCopy(field.schema(),
                 interop.get(field.pos()))).array()));
       }
       else {
@@ -87,7 +87,7 @@
             SpecificData.get().deepCopy(
                 field.schema(), interop.get(field.pos())));
       }
-      
+
       // Original field and deep copy should be different instances:
       if ((field.schema().getType() != Type.ENUM)
            && (field.schema().getType() != Type.NULL)
@@ -98,7 +98,7 @@
            && (field.schema().getType() != Type.DOUBLE)
            && (field.schema().getType() != Type.STRING)) {
         assertFalse("Field " + field.name() + " is same instance in deep copy",
-            interop.get(field.pos()) == 
+            interop.get(field.pos()) ==
               GenericData.get().deepCopy(
                   field.schema(), interop.get(field.pos())));
       }
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/io/Perf.java b/lang/java/ipc/src/test/java/org/apache/avro/io/Perf.java
index 762a215..1f8a892 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/io/Perf.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/io/Perf.java
@@ -52,7 +52,7 @@
 public class Perf {
   private static final int COUNT = 250000; // needs to be a multiple of 4
   private static final int CYCLES = 800;
-  
+
   /**
    * Use a fixed value seed for random number generation
    * to allow for better cross-run comparisons.
@@ -62,7 +62,7 @@
   protected static Random newRandom() {
     return new Random(SEED);
   }
-  
+
   private static class TestDescriptor {
     Class<? extends Test> test;
     String param;
@@ -75,7 +75,7 @@
       typeList.add(this);
     }
   }
-  
+
   private static final List<TestDescriptor> BASIC = new ArrayList<TestDescriptor>();
   private static final List<TestDescriptor> RECORD = new ArrayList<TestDescriptor>();
   private static final List<TestDescriptor> GENERIC = new ArrayList<TestDescriptor>();
@@ -132,7 +132,7 @@
     new TestDescriptor(ReflectNestedLargeFloatArrayTest.class, "-REFnlf").add(REFLECT);
     new TestDescriptor(ReflectNestedLargeFloatArrayBlockedTest.class, "-REFnlfb").add(REFLECT);
   }
-  
+
   private static void usage() {
     StringBuilder usage = new StringBuilder("Usage: Perf { -nowrite | -noread | ");
     StringBuilder details = new StringBuilder();
@@ -154,7 +154,7 @@
     System.out.println(usage.toString());
     System.out.print(details.toString());
   }
-  
+
   public static void main(String[] args) throws Exception {
     List<Test> tests = new ArrayList<Test>();
     boolean writeTests = true;
@@ -192,7 +192,7 @@
     }
     System.out.println("Executing tests: \n" + tests +  "\n readTests:" +
         readTests + "\n writeTests:" + writeTests + "\n cycles=" + CYCLES);
-    
+
     for (int k = 0; k < tests.size(); k++) {
       Test t = tests.get(k);
       try {
@@ -210,7 +210,7 @@
         throw e;
       }
     }
-    
+
     printHeader();
 
     for (int k = 0; k < tests.size(); k++) {
@@ -248,25 +248,25 @@
       t.reset();
     }
   }
-  
+
   private static final void printHeader() {
     String header = String.format(
         "%60s     time    M entries/sec   M bytes/sec  bytes/cycle",
         "test name");
     System.out.println(header.toString());
   }
-  
+
   private static final void printResult(long s, Test t, String name) {
     s /= 1000;
     double entries = (t.cycles * (double) t.count);
     double bytes = t.cycles * (double) t.encodedSize;
     StringBuilder result = new StringBuilder();
     result.append(String.format("%42s: %6d ms  ", name, (s/1000)));
-    result.append(String.format("%10.3f   %11.3f   %11d", 
+    result.append(String.format("%10.3f   %11.3f   %11d",
         (entries / s), (bytes/ s),  t.encodedSize));
     System.out.println(result.toString());
   }
-  
+
   private abstract static class Test {
 
     /**
@@ -280,7 +280,7 @@
     protected boolean isWriteTest = true;
     static DecoderFactory decoder_factory = new DecoderFactory();
     static EncoderFactory encoder_factory = new EncoderFactory();
-    
+
     public Test(String name, int cycles, int count) {
       this.name = name;
       this.cycles = cycles;
@@ -291,33 +291,33 @@
      * Reads data from a Decoder and returns the time taken in nanoseconds.
      */
     abstract long readTest() throws IOException;
-    
+
     /**
      * Writes data to an Encoder and returns the time taken in nanoseconds.
      */
     abstract long writeTest() throws IOException;
-    
+
     final boolean isWriteTest() {
       return isWriteTest;
     }
-    
+
     final boolean isReadTest() {
       return isReadTest;
     }
- 
+
     /** initializes data for read and write tests **/
     abstract void init() throws IOException;
 
     /** clears generated data arrays and other large objects created during initialization **/
     abstract void reset();
-    
+
     @Override
     public String toString() {
       return this.getClass().getSimpleName();
     }
-       
+
   }
-  
+
   /** the basic test writes a simple schema directly to an encoder or
    * reads from an array.  It does not use GenericDatumReader or any
    * higher level constructs, just manual serialization.
@@ -340,7 +340,7 @@
       readInternal(d);
       return (System.nanoTime() - t);
     }
-    
+
     @Override
     public final long writeTest() throws IOException {
       long t = System.nanoTime();
@@ -349,11 +349,11 @@
       e.flush();
       return (System.nanoTime() - t);
     }
-    
+
     protected Decoder getDecoder() throws IOException {
       return newDecoder();
     }
-    
+
     private Encoder getEncoder() throws IOException {
       return newEncoder(getOutputStream());
     }
@@ -361,7 +361,7 @@
     protected Decoder newDecoder() {
       return decoder_factory.binaryDecoder(data, null);
     }
-    
+
     protected Encoder newEncoder(ByteArrayOutputStream out) throws IOException {
       Encoder e = encoder_factory.binaryEncoder(out, null);
 //    Encoder e = encoder_factory.directBinaryEncoder(out, null);
@@ -373,7 +373,7 @@
     private ByteArrayOutputStream getOutputStream() {
       return new ByteArrayOutputStream((int)(encodedSize > 0 ? encodedSize : count));
     }
-    
+
     @Override
     void init() throws IOException {
       genSourceData();
@@ -390,7 +390,7 @@
     abstract void readInternal(Decoder d) throws IOException;
     abstract void writeInternal(Encoder e) throws IOException;
   }
-  
+
   static class IntTest extends BasicTest {
     protected int[] sourceData = null;
     public IntTest() throws IOException {
@@ -412,7 +412,7 @@
         sourceData[i+3] = r.nextInt(150000000); // most in 4, some in 5
       }
     }
-   
+
     @Override
     void readInternal(Decoder d) throws IOException {
       for (int i = 0; i < count/4; i++) {
@@ -432,7 +432,7 @@
         e.writeInt(sourceData[i+3]);
       }
     }
-  
+
     @Override
     void reset() {
       sourceData = null;
@@ -466,30 +466,30 @@
       }
     }
   }
- 
+
   // this tests reading Longs that are sometimes very large
   static class LongTest extends BasicTest {
     private long[] sourceData = null;
     public LongTest() throws IOException {
       super("Long", "{ \"type\": \"long\"} ");
     }
-    
+
     @Override
     void genSourceData() {
       Random r = newRandom();
       sourceData = new long[count];
       for (int i = 0; i < sourceData.length; i+=4) {
-        sourceData[i] = r.nextLong() % 0x7FL; // half fit in 1, half in 2 
+        sourceData[i] = r.nextLong() % 0x7FL; // half fit in 1, half in 2
         sourceData[i+1] = r.nextLong() % 0x1FFFFFL; // half fit in <=3, half in 4
         sourceData[i+2] = r.nextLong() % 0x3FFFFFFFFL; // half in <=5, half in 6
-        sourceData[i+3] = r.nextLong() % 0x1FFFFFFFFFFFFL; // half in <=8, half in 9 
+        sourceData[i+3] = r.nextLong() % 0x1FFFFFFFFFFFFL; // half in <=8, half in 9
       }
       // last 16, make full size
       for (int i = sourceData.length - 16; i < sourceData.length; i ++) {
         sourceData[i] = r.nextLong();
       }
     }
-   
+
     @Override
     void readInternal(Decoder d) throws IOException {
       for (int i = 0; i < count/4; i++) {
@@ -509,14 +509,14 @@
         e.writeLong(sourceData[i+3]);
       }
     }
-  
+
     @Override
     void reset() {
       sourceData = null;
       data = null;
     }
   }
-  
+
   static class FloatTest extends BasicTest {
     float[] sourceData = null;
     public FloatTest() throws IOException {
@@ -531,10 +531,10 @@
       Random r = newRandom();
       sourceData = new float[count];
       for (int i = 0; i < sourceData.length;) {
-        sourceData[i++] = r.nextFloat(); 
+        sourceData[i++] = r.nextFloat();
       }
     }
-   
+
     @Override
     void readInternal(Decoder d) throws IOException {
       for (int i = 0; i < count; i+=4) {
@@ -554,7 +554,7 @@
         e.writeFloat(sourceData[i+3]);
       }
     }
-  
+
     @Override
     void reset() {
       sourceData = null;
@@ -567,16 +567,16 @@
     public DoubleTest() throws IOException {
       super("Double", "{ \"type\": \"double\"} ");
     }
-    
+
     @Override
     void genSourceData() {
       Random r = newRandom();
       sourceData = new double[count];
       for (int i = 0; i < sourceData.length;) {
-        sourceData[i++] = r.nextDouble(); 
+        sourceData[i++] = r.nextDouble();
       }
     }
-   
+
     @Override
     void readInternal(Decoder d) throws IOException {
       for (int i = 0; i < count; i+=4) {
@@ -596,29 +596,29 @@
         e.writeDouble(sourceData[i+3]);
       }
     }
-  
+
     @Override
     void reset() {
       sourceData = null;
       data = null;
     }
   }
-  
+
   static class BoolTest extends BasicTest {
     boolean[] sourceData = null;
     public BoolTest() throws IOException {
       super("Boolean", "{ \"type\": \"boolean\"} ");
     }
-    
+
     @Override
     void genSourceData() {
       Random r = newRandom();
       sourceData = new boolean[count];
       for (int i = 0; i < sourceData.length;) {
-        sourceData[i++] = r.nextBoolean(); 
+        sourceData[i++] = r.nextBoolean();
       }
     }
-   
+
     @Override
     void readInternal(Decoder d) throws IOException {
       for (int i = 0; i < count/4; i++) {
@@ -638,20 +638,20 @@
         e.writeBoolean(sourceData[i+3]);
       }
     }
-  
+
     @Override
     void reset() {
       sourceData = null;
       data = null;
     }
   }
-  
+
   static class BytesTest extends BasicTest {
     byte[][] sourceData = null;
     public BytesTest() throws IOException {
       super("Bytes", "{ \"type\": \"bytes\"} ", 5);
     }
-    
+
     @Override
     void genSourceData() {
       Random r = newRandom();
@@ -659,10 +659,10 @@
       for (int i = 0; i < sourceData.length;) {
         byte[] data = new byte[r.nextInt(70)];
         r.nextBytes(data);
-        sourceData[i++] = data; 
+        sourceData[i++] = data;
       }
     }
-   
+
     @Override
     void readInternal(Decoder d) throws IOException {
       ByteBuffer bb = ByteBuffer.allocate(70);
@@ -683,14 +683,14 @@
         e.writeBytes(sourceData[i+3]);
       }
     }
-  
+
     @Override
     void reset() {
       sourceData = null;
       data = null;
     }
   }
-  
+
   private static String randomString(Random r) {
     char[] data = new char[r.nextInt(70)];
     for (int j = 0; j < data.length; j++) {
@@ -704,7 +704,7 @@
     public StringTest() throws IOException {
       super("String", "{ \"type\": \"string\"} ", 5);
     }
-    
+
     @Override
     void genSourceData() {
       Random r = newRandom();
@@ -712,7 +712,7 @@
       for (int i = 0; i < sourceData.length;)
         sourceData[i++] = randomString(r);
     }
-   
+
     @Override
     void readInternal(Decoder d) throws IOException {
       Utf8 utf = new Utf8();
@@ -733,14 +733,14 @@
         e.writeString(sourceData[i+3]);
       }
     }
-  
+
     @Override
     void reset() {
       sourceData = null;
       data = null;
     }
   }
-  
+
   static class ArrayTest extends FloatTest {
     public ArrayTest() throws IOException {
       super("Array",
@@ -757,7 +757,7 @@
           "    }" +
           "   }]}}");
     }
-   
+
     @Override
     void readInternal(Decoder d) throws IOException {
       d.readArrayStart();
@@ -791,7 +791,7 @@
       e.writeArrayEnd();
     }
   }
-  
+
   static class MapTest extends FloatTest {
     public MapTest() throws IOException {
       super("Map", "{ \"type\": \"map\", \"values\": " +
@@ -802,7 +802,7 @@
           "   {\"name\":\"f4\", \"type\":\"float\"}]" +
           "  }} ");
     }
-   
+
     @Override
     void readInternal(Decoder d) throws IOException {
       Utf8 key = new Utf8();
@@ -834,8 +834,8 @@
       e.writeMapEnd();
     }
   }
-  
-  private static final String RECORD_SCHEMA = 
+
+  private static final String RECORD_SCHEMA =
     "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
     + "{ \"name\": \"f1\", \"type\": \"double\" },\n"
     + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
@@ -844,7 +844,7 @@
     + "{ \"name\": \"f5\", \"type\": \"int\" },\n"
     + "{ \"name\": \"f6\", \"type\": \"int\" }\n"
     + "] }";
-  
+
   private static final String NESTED_RECORD_SCHEMA =
     "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
     + "{ \"name\": \"f1\", \"type\": \n" +
@@ -857,7 +857,7 @@
     + "{ \"name\": \"f5\", \"type\": \"int\" },\n"
     + "{ \"name\": \"f6\", \"type\": \"int\" }\n"
     + "] }";
-  
+
   private static class Rec {
     double f1;
     double f2;
@@ -866,7 +866,7 @@
     int f5;
     int f6;
     Rec() {
-      
+
     }
     Rec(Random r) {
       f1 = r.nextDouble();
@@ -891,7 +891,7 @@
       Random r = newRandom();
       sourceData = new Rec[count];
       for (int i = 0; i < sourceData.length; i++) {
-        sourceData[i] = new Rec(r); 
+        sourceData[i] = new Rec(r);
       }
     }
     @Override
@@ -923,7 +923,7 @@
       data = null;
     }
   }
-  
+
   static class ValidatingRecord extends RecordTest {
     ValidatingRecord() throws IOException {
       super("ValidatingRecord");
@@ -934,10 +934,10 @@
     }
     @Override
     protected Encoder newEncoder(ByteArrayOutputStream out) throws IOException {
-      return encoder_factory.validatingEncoder(schema, super.newEncoder(out));  
+      return encoder_factory.validatingEncoder(schema, super.newEncoder(out));
     }
   }
-  
+
   static class ResolvingRecord extends RecordTest {
     public ResolvingRecord() throws IOException {
       super("ResolvingRecord");
@@ -962,7 +962,7 @@
     + "{ \"name\": \"f8\", \"type\": \"string\","
       + "\"default\": \"undefined\" }\n"
     + "] }";
-  
+
   private static final String RECORD_SCHEMA_WITH_OUT_OF_ORDER =
     "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
     + "{ \"name\": \"f1\", \"type\": \"double\" },\n"
@@ -1025,7 +1025,7 @@
       }
     }
   }
-  
+
   /**
    * Tests the performance of resolving a change in field order.
    */
@@ -1063,7 +1063,7 @@
       }
     }
   }
-  
+
   /**
    * Tests the performance of resolving a type promotion.
    */
@@ -1101,7 +1101,7 @@
       }
     }
   }
-  
+
   static class GenericTest extends BasicTest {
     GenericRecord[] sourceData = null;
     protected final GenericDatumReader<Object> reader;
@@ -1133,7 +1133,7 @@
         rec.put(3, r.nextInt());
         rec.put(4, r.nextInt());
         rec.put(5, r.nextInt());
-        sourceData[i] = rec; 
+        sourceData[i] = rec;
       }
     }
     @Override
@@ -1156,14 +1156,14 @@
       data = null;
     }
   }
-  
-  private static final String GENERIC_STRINGS = 
+
+  private static final String GENERIC_STRINGS =
     "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
     + "{ \"name\": \"f1\", \"type\": \"string\" },\n"
     + "{ \"name\": \"f2\", \"type\": \"string\" },\n"
     + "{ \"name\": \"f3\", \"type\": \"string\" }\n"
     + "] }";
-  
+
   static class GenericStrings extends GenericTest {
     public GenericStrings() throws IOException {
       super("GenericStrings", GENERIC_STRINGS);
@@ -1177,7 +1177,7 @@
         rec.put(0, randomString(r));
         rec.put(1, randomString(r));
         rec.put(2, randomString(r));
-        sourceData[i] = rec; 
+        sourceData[i] = rec;
       }
     }
   }
@@ -1210,11 +1210,11 @@
       rec.put(3, r.nextInt());
       rec.put(4, r.nextInt());
       rec.put(5, r.nextInt());
-      sourceData[i] = rec; 
+      sourceData[i] = rec;
     }
     return sourceData;
   }
-  
+
   static class GenericNestedFake extends BasicTest {
     //reads and writes generic data, but not using
     //GenericDatumReader or GenericDatumWriter
@@ -1267,7 +1267,7 @@
       data = null;
       sourceData = null;
     }
-    
+
   }
 
   private static abstract class GenericResolving extends GenericTest {
@@ -1312,7 +1312,7 @@
       return new Schema.Parser().parse(RECORD_SCHEMA_WITH_PROMOTION);
     }
   }
-  
+
   static class GenericOneTimeDecoderUse extends GenericTest {
     public GenericOneTimeDecoderUse() throws IOException {
       super("GenericOneTimeDecoderUse_");
@@ -1487,7 +1487,7 @@
       return new Rec(r);
     }
   }
-  
+
   static class ReflectFloatTest extends ReflectTest<float[]> {
     ReflectFloatTest() throws IOException {
       super("ReflectFloat", new float[0], COUNT);
@@ -1531,7 +1531,7 @@
       return populateDoubleArray(r);
     }
   }
-  
+
   static class ReflectIntArrayTest extends ReflectTest<int[]> {
     ReflectIntArrayTest() throws IOException {
       super("ReflectIntArray", new int[0], 12);
@@ -1542,7 +1542,7 @@
       return populateIntArray(r);
     }
   }
-  
+
   static class ReflectLongArrayTest extends ReflectTest<long[]> {
     ReflectLongArrayTest() throws IOException {
       super("ReflectLongArray", new long[0], 24);
@@ -1571,7 +1571,7 @@
 
       Foo() {
       }
-      
+
       Foo(Random r) {
         bar = new Vals[smallArraySize(r)];
         for (int i = 0; i < bar.length; i++) {
@@ -1588,7 +1588,7 @@
 
       Vals(){
       }
-      
+
       Vals(Random r) {
         this.f1 = r.nextFloat();
         this.f2 = r.nextFloat();
@@ -1601,7 +1601,7 @@
 
   static public class FloatFoo {
     float[] floatBar;
-    
+
     FloatFoo() {
     }
 
@@ -1624,7 +1624,7 @@
     int size = large ? largeArraySize(r) : smallArraySize(r);
     return populateFloatArray(r, size);
   }
-  
+
   static float[] populateFloatArray(Random r, int size) {
     float[] result = new float[size];
     for (int i = 0; i < result.length; i++) {
@@ -1632,11 +1632,11 @@
     }
     return result;
   }
-  
+
   static double[] populateDoubleArray(Random r) {
     return populateDoubleArray(r, smallArraySize(r));
   }
-  
+
   static double[] populateDoubleArray(Random r, int size) {
     double[] result = new double[size];
     for (int i = 0; i < result.length; i++) {
@@ -1653,7 +1653,7 @@
     }
     return result;
   }
-  
+
   static long[] populateLongArray(Random r) {
     int size = smallArraySize(r);
     long[] result = new long[size];
@@ -1662,7 +1662,7 @@
     }
     return result;
   }
-  
+
   static class ReflectNestedFloatArrayTest extends ReflectTest<FloatFoo> {
     public ReflectNestedFloatArrayTest() throws IOException {
       super("ReflectNestedFloatArray", new FloatFoo(new Random(), false), 10);
@@ -1686,7 +1686,7 @@
     }
 
   }
-  
+
   static class ReflectNestedLargeFloatArrayBlockedTest extends ReflectTest<FloatFoo> {
     public ReflectNestedLargeFloatArrayBlockedTest() throws IOException {
       super("ReflectNestedLargeFloatArrayBlocked", new FloatFoo(new Random(), true),
@@ -1697,7 +1697,7 @@
     protected FloatFoo createDatum(Random r) {
       return new FloatFoo(r, true);
     }
-    
+
     @Override
     protected Encoder newEncoder(ByteArrayOutputStream out) throws IOException {
       return new EncoderFactory().configureBlockSize(254).blockingBinaryEncoder(out, null);
@@ -1722,7 +1722,7 @@
 
     Rec1() {
     }
-    
+
     Rec1(Random r) {
       d1 = r.nextDouble();
       d11 = r.nextDouble();
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestLocalTransceiver.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestLocalTransceiver.java
index 09f7078..47ee403 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestLocalTransceiver.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestLocalTransceiver.java
@@ -59,7 +59,7 @@
         "m").getRequest());
     params.put("x", new Utf8("hello"));
     GenericRequestor r = new GenericRequestor(protocol, t);
-    
+
     for(int x = 0; x < 5; x++)
       assertEquals(new Utf8("there"), r.request("m", params));
   }
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServer.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServer.java
index a39de4c..14981c3 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServer.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServer.java
@@ -49,31 +49,31 @@
   public static class MailImpl implements Mail {
 
     private CountDownLatch allMessages = new CountDownLatch(5);
-    
+
     // in this simple example just return details of the message
     public String send(Message message) {
-      return "Sent message to ["+ message.getTo().toString() + 
-          "] from [" + message.getFrom().toString() + "] with body [" + 
+      return "Sent message to ["+ message.getTo().toString() +
+          "] from [" + message.getFrom().toString() + "] with body [" +
           message.getBody().toString() + "]";
     }
-    
+
     public void fireandforget(Message message) {
       allMessages.countDown();
     }
-    
+
     private void awaitMessages() throws InterruptedException {
       allMessages.await(2, TimeUnit.SECONDS);
     }
-    
+
     private void assertAllMessagesReceived() {
       assertEquals(0, allMessages.getCount());
     }
 
     public void reset() {
-      allMessages = new CountDownLatch(5);      
+      allMessages = new CountDownLatch(5);
     }
   }
-  
+
   @BeforeClass
   public static void initializeConnections()throws Exception {
     // start server
@@ -82,23 +82,23 @@
     Responder responder = new SpecificResponder(Mail.class, mailService);
     server = initializeServer(responder);
     server.start();
-  
+
     int serverPort = server.getPort();
     System.out.println("server port : " + serverPort);
 
     transceiver = initializeTransceiver(serverPort);
     proxy = SpecificRequestor.getClient(Mail.class, transceiver);
   }
-  
+
   protected static Server initializeServer(Responder responder) {
     return new NettyServer(responder, new InetSocketAddress(0));
   }
-  
+
   protected static Transceiver initializeTransceiver(int serverPort) throws IOException {
     return new NettyTransceiver(new InetSocketAddress(
         serverPort), CONNECT_TIMEOUT_MILLIS);
   }
-  
+
   @AfterClass
   public static void tearDownConnections() throws Exception{
     transceiver.close();
@@ -117,7 +117,7 @@
         "Sent message to [wife] from [husband] with body [I love you!]",
         result.toString());
   }
-  
+
   @Test
   public void testOneway() throws Exception {
     for (int x = 0; x < 5; x++) {
@@ -126,7 +126,7 @@
     mailService.awaitMessages();
     mailService.assertAllMessagesReceived();
   }
-  
+
   @Test
   public void testMixtureOfRequests() throws Exception {
     mailService.reset();
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerConcurrentExecution.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerConcurrentExecution.java
index 6d3fd1e..8859998 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerConcurrentExecution.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerConcurrentExecution.java
@@ -37,31 +37,31 @@
 import org.junit.Test;
 
 /**
- * Verifies that RPCs executed by different client threads using the same 
+ * Verifies that RPCs executed by different client threads using the same
  * NettyTransceiver will execute concurrently.  The test follows these steps:
- * 1. Execute the {@link #org.apache.avro.test.Simple.add(int, int)} RPC to 
+ * 1. Execute the {@link #org.apache.avro.test.Simple.add(int, int)} RPC to
  *    complete the Avro IPC handshake.
  * 2a. In a background thread, wait for the waitLatch.
- * 3a. In the main thread, invoke 
- *    {@link #org.apache.avro.test.Simple.hello(String)} with the argument 
- *    "wait".  This causes the ClientImpl running on the server to count down 
- *    the wait latch, which will unblock the background thread and allow it to 
- *    proceed.  After counting down the latch, this call blocks, waiting for 
+ * 3a. In the main thread, invoke
+ *    {@link #org.apache.avro.test.Simple.hello(String)} with the argument
+ *    "wait".  This causes the ClientImpl running on the server to count down
+ *    the wait latch, which will unblock the background thread and allow it to
+ *    proceed.  After counting down the latch, this call blocks, waiting for
  *    {@link #org.apache.avro.test.Simple.ack()} to be invoked.
- * 2b. The background thread wakes up because the waitLatch has been counted 
+ * 2b. The background thread wakes up because the waitLatch has been counted
  *     down.  Now we know that some thread is executing inside hello(String).
- *     Next, execute {@link #org.apache.avro.test.Simple.ack()} in the 
- *     background thread, which will allow the thread executing hello(String) 
+ *     Next, execute {@link #org.apache.avro.test.Simple.ack()} in the
+ *     background thread, which will allow the thread executing hello(String)
  *     to return.
- * 3b. The thread executing hello(String) on the server unblocks (since ack() 
+ * 3b. The thread executing hello(String) on the server unblocks (since ack()
  *     has been called), allowing hello(String) to return.
- * 4. If control returns to the main thread, we know that two RPCs 
+ * 4. If control returns to the main thread, we know that two RPCs
  *    (hello(String) and ack()) were executing concurrently.
  */
 public class TestNettyServerConcurrentExecution {
   private Server server;
   private Transceiver transceiver;
-  
+
   @After
   public void cleanUpAfter() throws Exception {
     try {
@@ -79,41 +79,41 @@
       e.printStackTrace();
     }
   }
-  
+
   @Test(timeout=30000)
   public void test() throws Exception {
     final CountDownLatch waitLatch = new CountDownLatch(1);
     server = new NettyServer(
-        new SpecificResponder(Simple.class, new SimpleImpl(waitLatch)), 
-        new InetSocketAddress(0), 
+        new SpecificResponder(Simple.class, new SimpleImpl(waitLatch)),
+        new InetSocketAddress(0),
         new NioServerSocketChannelFactory
-          (Executors.newCachedThreadPool(), Executors.newCachedThreadPool()), 
+          (Executors.newCachedThreadPool(), Executors.newCachedThreadPool()),
         new ExecutionHandler(Executors.newCachedThreadPool()));
     server.start();
-    
+
     transceiver = new NettyTransceiver(new InetSocketAddress(
         server.getPort()), TestNettyServer.CONNECT_TIMEOUT_MILLIS);
-    
+
     // 1. Create the RPC proxy, and establish the handshake:
-    final Simple.Callback simpleClient = 
+    final Simple.Callback simpleClient =
         SpecificRequestor.getClient(Simple.Callback.class, transceiver);
     SpecificRequestor.getRemote(simpleClient);    // force handshake
-    
+
     /*
      * 2a. In a background thread, wait for the Client.hello("wait") call to be
      *    received by the server, then:
-     * 2b. Execute the Client.ack() RPC, which will unblock the 
+     * 2b. Execute the Client.ack() RPC, which will unblock the
      *     Client.hello("wait") call, allowing it to return to the main thread.
      */
     new Thread() {
       @Override
       public void run() {
-        setName(TestNettyServerConcurrentExecution.class.getSimpleName() + 
+        setName(TestNettyServerConcurrentExecution.class.getSimpleName() +
             "Ack Thread");
         try {
           // Step 2a:
           waitLatch.await();
-          
+
           // Step 2b:
           simpleClient.ack();
         } catch (InterruptedException e) {
@@ -121,27 +121,27 @@
         }
       }
     }.start();
-    
+
     /*
      * 3. Execute the Client.hello("wait") RPC, which will block until the
      *    Client.ack() call has completed in the background thread.
      */
     String response = simpleClient.hello("wait");
-    
+
     // 4. If control reaches here, both RPCs have executed concurrently
-    Assert.assertEquals("wait", response); 
+    Assert.assertEquals("wait", response);
   }
 
   /**
    * Implementation of the Simple interface for use with this unit test.
-   * If {@link #hello(String)} is called with "wait" as its argument,  
-   * {@link #waitLatch} will be counted down, and {@link #hello(String)} will 
+   * If {@link #hello(String)} is called with "wait" as its argument,
+   * {@link #waitLatch} will be counted down, and {@link #hello(String)} will
    * block until {@link #ack()} has been invoked.
    */
   private static class SimpleImpl implements Simple {
     private final CountDownLatch waitLatch;
     private final CountDownLatch ackLatch = new CountDownLatch(1);
-    
+
     /**
      * Creates a SimpleImpl that uses the given CountDownLatch.
      * @param waitLatch the CountDownLatch to use in {@link #hello(String)}.
@@ -149,20 +149,20 @@
     public SimpleImpl(final CountDownLatch waitLatch) {
       this.waitLatch = waitLatch;
     }
-    
+
     @Override
     public int add(int arg1, int arg2) throws AvroRemoteException {
       // Step 1:
       return arg1 + arg2;
     }
-    
+
     @Override
     public String hello(String greeting) throws AvroRemoteException {
       if (greeting.equals("wait")) {
         try {
           // Step 3a:
           waitLatch.countDown();
-          
+
           // Step 3b:
           ackLatch.await();
         } catch (InterruptedException e) {
@@ -172,15 +172,15 @@
       }
       return greeting;
     }
-    
+
     @Override
     public void ack() {
       // Step 2b:
       ackLatch.countDown();
     }
-    
+
     // All RPCs below this line are irrelevant to this test:
-    
+
     @Override
     public TestRecord echo(TestRecord record) throws AvroRemoteException {
       return record;
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCallbacks.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCallbacks.java
index 78816c0..3a9e158 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCallbacks.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCallbacks.java
@@ -52,17 +52,17 @@
   private static Transceiver transceiver;
   private static Simple.Callback simpleClient;
   private static final AtomicBoolean ackFlag = new AtomicBoolean(false);
-  private static final AtomicReference<CountDownLatch> ackLatch = 
+  private static final AtomicReference<CountDownLatch> ackLatch =
     new AtomicReference<CountDownLatch>(new CountDownLatch(1));
   private static Simple simpleService = new SimpleImpl(ackFlag);
-  
+
   @BeforeClass
   public static void initializeConnections() throws Exception {
     // start server
     Responder responder = new SpecificResponder(Simple.class, simpleService);
     server = new NettyServer(responder, new InetSocketAddress(0));
     server.start();
-  
+
     int serverPort = server.getPort();
     System.out.println("server port : " + serverPort);
 
@@ -70,7 +70,7 @@
         serverPort), TestNettyServer.CONNECT_TIMEOUT_MILLIS);
     simpleClient = SpecificRequestor.getClient(Simple.Callback.class, transceiver);
   }
-  
+
   @AfterClass
   public static void tearDownConnections() throws Exception {
     if (transceiver != null) {
@@ -80,18 +80,18 @@
       server.close();
     }
   }
-  
+
   @Test
   public void greeting() throws Exception {
     // Test synchronous RPC:
     Assert.assertEquals("Hello, how are you?", simpleClient.hello("how are you?"));
-    
+
     // Test asynchronous RPC (future):
     CallFuture<String> future1 = new CallFuture<String>();
     simpleClient.hello("World!", future1);
     Assert.assertEquals("Hello, World!", future1.get(2, TimeUnit.SECONDS));
     Assert.assertNull(future1.getError());
-    
+
     // Test asynchronous RPC (callback):
     final CallFuture<String> future2 = new CallFuture<String>();
     simpleClient.hello("what's up?", new Callback<String>() {
@@ -107,7 +107,7 @@
     Assert.assertEquals("Hello, what's up?", future2.get(2, TimeUnit.SECONDS));
     Assert.assertNull(future2.getError());
   }
-  
+
   @Test
   public void echo() throws Exception {
     TestRecord record = TestRecord.newBuilder().setHash(
@@ -115,16 +115,16 @@
             new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8 })).
         setKind(org.apache.avro.test.Kind.FOO).
         setName("My Record").build();
-    
+
     // Test synchronous RPC:
     Assert.assertEquals(record, simpleClient.echo(record));
-    
+
     // Test asynchronous RPC (future):
     CallFuture<TestRecord> future1 = new CallFuture<TestRecord>();
     simpleClient.echo(record, future1);
     Assert.assertEquals(record, future1.get(2, TimeUnit.SECONDS));
     Assert.assertNull(future1.getError());
-    
+
     // Test asynchronous RPC (callback):
     final CallFuture<TestRecord> future2 = new CallFuture<TestRecord>();
     simpleClient.echo(record, new Callback<TestRecord>() {
@@ -140,18 +140,18 @@
     Assert.assertEquals(record, future2.get(2, TimeUnit.SECONDS));
     Assert.assertNull(future2.getError());
   }
-  
+
   @Test
   public void add() throws Exception {
     // Test synchronous RPC:
     Assert.assertEquals(8, simpleClient.add(2, 6));
-    
+
     // Test asynchronous RPC (future):
     CallFuture<Integer> future1 = new CallFuture<Integer>();
     simpleClient.add(8, 8, future1);
     Assert.assertEquals(new Integer(16), future1.get(2, TimeUnit.SECONDS));
     Assert.assertNull(future1.getError());
-    
+
     // Test asynchronous RPC (callback):
     final CallFuture<Integer> future2 = new CallFuture<Integer>();
     simpleClient.add(512, 256, new Callback<Integer>() {
@@ -167,20 +167,20 @@
     Assert.assertEquals(new Integer(768), future2.get(2, TimeUnit.SECONDS));
     Assert.assertNull(future2.getError());
   }
-  
+
   @Test
   public void echoBytes() throws Exception {
     ByteBuffer byteBuffer = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 });
-    
+
     // Test synchronous RPC:
     Assert.assertEquals(byteBuffer, simpleClient.echoBytes(byteBuffer));
-    
+
     // Test asynchronous RPC (future):
     CallFuture<ByteBuffer> future1 = new CallFuture<ByteBuffer>();
     simpleClient.echoBytes(byteBuffer, future1);
     Assert.assertEquals(byteBuffer, future1.get(2, TimeUnit.SECONDS));
     Assert.assertNull(future1.getError());
-    
+
     // Test asynchronous RPC (callback):
     final CallFuture<ByteBuffer> future2 = new CallFuture<ByteBuffer>();
     simpleClient.echoBytes(byteBuffer, new Callback<ByteBuffer>() {
@@ -196,7 +196,7 @@
     Assert.assertEquals(byteBuffer, future2.get(2, TimeUnit.SECONDS));
     Assert.assertNull(future2.getError());
   }
-  
+
   @Test()
   public void error() throws IOException, InterruptedException, TimeoutException {
     // Test synchronous RPC:
@@ -209,7 +209,7 @@
       e.printStackTrace();
       Assert.fail("Unexpected error: " + e.toString());
     }
-    
+
     // Test asynchronous RPC (future):
     CallFuture<Void> future = new CallFuture<Void>();
     simpleClient.error(future);
@@ -217,14 +217,14 @@
       future.get(2, TimeUnit.SECONDS);
       Assert.fail("Expected " + TestError.class.getCanonicalName() + " to be thrown");
     } catch (ExecutionException e) {
-      Assert.assertTrue("Expected " + TestError.class.getCanonicalName(), 
+      Assert.assertTrue("Expected " + TestError.class.getCanonicalName(),
           e.getCause() instanceof TestError);
     }
     Assert.assertNotNull(future.getError());
-    Assert.assertTrue("Expected " + TestError.class.getCanonicalName(), 
+    Assert.assertTrue("Expected " + TestError.class.getCanonicalName(),
         future.getError() instanceof TestError);
     Assert.assertNull(future.getResult());
-    
+
     // Test asynchronous RPC (callback):
     final CountDownLatch latch = new CountDownLatch(1);
     final AtomicReference<Throwable> errorRef = new AtomicReference<Throwable>();
@@ -243,24 +243,24 @@
     Assert.assertNotNull(errorRef.get());
     Assert.assertTrue(errorRef.get() instanceof TestError);
   }
-  
+
   @Test
   public void ack() throws Exception {
     simpleClient.ack();
     ackLatch.get().await(2, TimeUnit.SECONDS);
     Assert.assertTrue("Expected ack flag to be set", ackFlag.get());
-    
+
     ackLatch.set(new CountDownLatch(1));
     simpleClient.ack();
     ackLatch.get().await(2, TimeUnit.SECONDS);
     Assert.assertFalse("Expected ack flag to be cleared", ackFlag.get());
   }
-  
+
   @Test
   public void testSendAfterChannelClose() throws Exception {
-    // Start up a second server so that closing the server doesn't 
+    // Start up a second server so that closing the server doesn't
     // interfere with the other unit tests:
-    Server server2 = new NettyServer(new SpecificResponder(Simple.class, simpleService), 
+    Server server2 = new NettyServer(new SpecificResponder(Simple.class, simpleService),
         new InetSocketAddress(0));
     server2.start();
     try {
@@ -270,12 +270,12 @@
       Transceiver transceiver2 = new NettyTransceiver(new InetSocketAddress(
           serverPort), TestNettyServer.CONNECT_TIMEOUT_MILLIS);
       try {
-        Simple.Callback simpleClient2 = 
+        Simple.Callback simpleClient2 =
           SpecificRequestor.getClient(Simple.Callback.class, transceiver2);
 
         // Verify that connection works:
         Assert.assertEquals(3, simpleClient2.add(1, 2));
-        
+
         // Try again with callbacks:
         CallFuture<Integer> addFuture = new CallFuture<Integer>();
         simpleClient2.add(1, 2, addFuture);
@@ -285,7 +285,7 @@
         server2.close();
         Thread.sleep(1000L);
 
-        // Send a new RPC, and verify that it throws an Exception that 
+        // Send a new RPC, and verify that it throws an Exception that
         // can be detected by the client:
         boolean ioeCaught = false;
         try {
@@ -299,8 +299,8 @@
           throw e;
         }
         Assert.assertTrue("Expected IOException", ioeCaught);
-        
-        // Send a new RPC with callback, and verify that the correct Exception 
+
+        // Send a new RPC with callback, and verify that the correct Exception
         // is thrown:
         ioeCaught = false;
         try {
@@ -322,13 +322,13 @@
       server2.close();
     }
   }
-  
+
   @Test
   public void cancelPendingRequestsOnTransceiverClose() throws Exception {
-    // Start up a second server so that closing the server doesn't 
+    // Start up a second server so that closing the server doesn't
     // interfere with the other unit tests:
     BlockingSimpleImpl blockingSimpleImpl = new BlockingSimpleImpl();
-    Server server2 = new NettyServer(new SpecificResponder(Simple.class, 
+    Server server2 = new NettyServer(new SpecificResponder(Simple.class,
         blockingSimpleImpl), new InetSocketAddress(0));
     server2.start();
     try {
@@ -338,18 +338,18 @@
       CallFuture<Integer> addFuture = new CallFuture<Integer>();
       Transceiver transceiver2 = new NettyTransceiver(new InetSocketAddress(
           serverPort), TestNettyServer.CONNECT_TIMEOUT_MILLIS);
-      try {        
-        Simple.Callback simpleClient2 = 
+      try {
+        Simple.Callback simpleClient2 =
           SpecificRequestor.getClient(Simple.Callback.class, transceiver2);
-        
+
         // The first call has to block for the handshake:
         Assert.assertEquals(3, simpleClient2.add(1, 2));
-        
+
         // Now acquire the semaphore so that the server will block:
         blockingSimpleImpl.acquireRunPermit();
         simpleClient2.add(1, 2, addFuture);
       } finally {
-        // When the transceiver is closed, the CallFuture should get 
+        // When the transceiver is closed, the CallFuture should get
         // an IOException
         transceiver2.close();
       }
@@ -369,7 +369,7 @@
       server2.close();
     }
   }
-  
+
   @Test
   public void cancelPendingRequestsAfterChannelCloseByServerShutdown() throws Exception {
     // The purpose of this test is to verify that a client doesn't stay
@@ -415,25 +415,25 @@
           }
         }
       });
-      
+
       // Start client call
       t.start();
-      
+
       // Wait until method is entered on the server side
       blockingSimpleImpl.acquireEnterPermit();
-      
+
       // The server side method is now blocked waiting on the run permit
       // (= is busy handling the request)
-      
+
       // Stop the server
       server2.close();
-      
+
       // With the server gone, we expect the client to get some exception and exit
       // Wait for client thread to exit
       t.join(10000);
-      
+
       Assert.assertFalse("Client request should not be blocked on server shutdown", t.isAlive());
-      
+
     } finally {
       blockingSimpleImpl.releaseRunPermit();
       server2.close();
@@ -441,15 +441,15 @@
         transceiver2.close();
     }
   }
-  
+
   @Test
   public void clientReconnectAfterServerRestart() throws Exception {
-    // Start up a second server so that closing the server doesn't 
+    // Start up a second server so that closing the server doesn't
     // interfere with the other unit tests:
     SimpleImpl simpleImpl = new BlockingSimpleImpl();
-    Server server2 = new NettyServer(new SpecificResponder(Simple.class, 
+    Server server2 = new NettyServer(new SpecificResponder(Simple.class,
         simpleImpl), new InetSocketAddress(0));
-    try {      
+    try {
       server2.start();
       int serverPort = server2.getPort();
       System.out.println("server2 port : " + serverPort);
@@ -457,10 +457,10 @@
       // Initialize a client, and establish a connection to the server:
       Transceiver transceiver2 = new NettyTransceiver(new InetSocketAddress(
           serverPort), TestNettyServer.CONNECT_TIMEOUT_MILLIS);
-      Simple.Callback simpleClient2 = 
+      Simple.Callback simpleClient2 =
           SpecificRequestor.getClient(Simple.Callback.class, transceiver2);
       Assert.assertEquals(3, simpleClient2.add(1, 2));
-      
+
       // Restart the server:
       server2.close();
       try {
@@ -471,11 +471,11 @@
         // Expected since server is no longer running
       }
       Thread.sleep(2000L);
-      server2 = new NettyServer(new SpecificResponder(Simple.class, 
+      server2 = new NettyServer(new SpecificResponder(Simple.class,
           simpleImpl), new InetSocketAddress(serverPort));
       server2.start();
-      
-      // Invoke an RPC using the same client, which should reestablish the 
+
+      // Invoke an RPC using the same client, which should reestablish the
       // connection to the server:
       Assert.assertEquals(3, simpleClient2.add(1, 2));
     } finally {
@@ -489,7 +489,7 @@
     final int threadCount = 8;
     final long runTimeMillis = 10 * 1000L;
     ExecutorService threadPool = Executors.newFixedThreadPool(threadCount);
-    
+
     System.out.println("Running performance test for " + runTimeMillis + "ms...");
     final AtomicLong rpcCount = new AtomicLong(0L);
     final AtomicBoolean runFlag = new AtomicBoolean(true);
@@ -511,23 +511,23 @@
         }
       });
     }
-    
+
     startLatch.await(2, TimeUnit.SECONDS);
     Thread.sleep(runTimeMillis);
     runFlag.set(false);
     threadPool.shutdown();
     Assert.assertTrue("Timed out shutting down thread pool", threadPool.awaitTermination(2, TimeUnit.SECONDS));
-    System.out.println("Completed " + rpcCount.get() + " RPCs in " + runTimeMillis + 
-        "ms => " + (((double)rpcCount.get() / (double)runTimeMillis) * 1000) + " RPCs/sec, " + 
+    System.out.println("Completed " + rpcCount.get() + " RPCs in " + runTimeMillis +
+        "ms => " + (((double)rpcCount.get() / (double)runTimeMillis) * 1000) + " RPCs/sec, " +
         ((double)runTimeMillis / (double)rpcCount.get()) + " ms/RPC.");
   }
-  
+
   /**
    * Implementation of the Simple interface.
    */
   private static class SimpleImpl implements Simple {
     private final AtomicBoolean ackFlag;
-    
+
     /**
      * Creates a SimpleImpl.
      * @param ackFlag the AtomicBoolean to toggle when ack() is called.
@@ -535,7 +535,7 @@
     public SimpleImpl(final AtomicBoolean ackFlag) {
       this.ackFlag = ackFlag;
     }
-    
+
     @Override
     public String hello(String greeting) throws AvroRemoteException {
       return "Hello, " + greeting;
@@ -567,7 +567,7 @@
       ackLatch.get().countDown();
     }
   }
-  
+
   /**
    * A SimpleImpl that requires a semaphore permit before executing any method.
    */
@@ -576,14 +576,14 @@
     private final Semaphore enterSemaphore = new Semaphore(1);
     /** Semaphore that must be acquired for the method to run and exit. */
     private final Semaphore runSemaphore = new Semaphore(1);
-    
+
     /**
      * Creates a BlockingSimpleImpl.
      */
     public BlockingSimpleImpl() {
       super(new AtomicBoolean());
     }
-    
+
     @Override
     public String hello(String greeting) throws AvroRemoteException {
       releaseEnterPermit();
@@ -649,7 +649,7 @@
         releaseRunPermit();
       }
     }
-    
+
     /**
      * Acquires a single permit from the semaphore.
      */
@@ -661,7 +661,7 @@
         throw new RuntimeException(e);
     }
     }
-    
+
     /**
      * Releases a single permit to the semaphore.
      */
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCompression.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCompression.java
index 98dc9e6..d805443 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCompression.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCompression.java
@@ -60,7 +60,7 @@
         channelFactory, new CompressionChannelPipelineFactory(),
         null);
   }
-  
+
   protected static Transceiver initializeTransceiver(int serverPort) throws IOException {
     return  new NettyTransceiver(new InetSocketAddress(serverPort),
         new CompressionChannelFactory(),
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithSSL.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithSSL.java
index 1611c01..a7f8e6a 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithSSL.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithSSL.java
@@ -42,7 +42,7 @@
 public class TestNettyServerWithSSL extends TestNettyServer{
   public static final String TEST_CERTIFICATE = "servercert.p12";
   public static final String TEST_CERTIFICATE_PASSWORD = "s3cret";
-  
+
   protected static Server initializeServer(Responder responder) {
     ChannelFactory channelFactory = new NioServerSocketChannelFactory(
         Executors.newCachedThreadPool(),
@@ -52,7 +52,7 @@
         channelFactory, new SSLChannelPipelineFactory(),
         null);
   }
-  
+
   protected static Transceiver initializeTransceiver(int serverPort) throws IOException {
     return  new NettyTransceiver(new InetSocketAddress(serverPort),
         new SSLChannelFactory(),
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestRpcPluginOrdering.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestRpcPluginOrdering.java
index d816fa5..02bc63d 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestRpcPluginOrdering.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestRpcPluginOrdering.java
@@ -35,17 +35,17 @@
 public class TestRpcPluginOrdering {
 
   private static AtomicInteger orderCounter = new AtomicInteger();
-  
+
   public class OrderPlugin extends RPCPlugin{
 
     public void clientStartConnect(RPCContext context) {
       assertEquals(0, orderCounter.getAndIncrement());
     }
-    
+
     public void clientSendRequest(RPCContext context) {
       assertEquals(1, orderCounter.getAndIncrement());
     }
-    
+
     public void clientReceiveResponse(RPCContext context) {
       assertEquals(6, orderCounter.getAndIncrement());
     }
@@ -66,16 +66,16 @@
       assertEquals(4, orderCounter.getAndIncrement());
     }
   }
-  
+
   @Test
   public void testRpcPluginOrdering() throws Exception {
     OrderPlugin plugin = new OrderPlugin();
-    
+
     SpecificResponder responder = new SpecificResponder(Mail.class, new TestMailImpl());
     SpecificRequestor requestor = new SpecificRequestor(Mail.class, new LocalTransceiver(responder));
     responder.addRPCPlugin(plugin);
     requestor.addRPCPlugin(plugin);
-    
+
     Mail client = SpecificRequestor.getClient(Mail.class, requestor);
     Message message = createTestMessage();
     client.send(message);
@@ -89,7 +89,7 @@
       build();
     return message;
   }
-  
+
   private static class TestMailImpl implements Mail{
     public String send(Message message) throws AvroRemoteException {
       return "Received";
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslAnonymous.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslAnonymous.java
index 68b40bf..2034e05 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslAnonymous.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslAnonymous.java
@@ -66,9 +66,9 @@
       new SaslSocketTransceiver(new InetSocketAddress(s.getPort()));
     ProtoInterface proxy =
       (ProtoInterface)ReflectRequestor.getClient(ProtoInterface.class, client);
-    
+
     byte[] result = proxy.test(new byte[64*1024]);
-    
+
     client.close();
     s.close();
   }
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslDigestMd5.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslDigestMd5.java
index cef4f77..34651eb 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslDigestMd5.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslDigestMd5.java
@@ -115,7 +115,7 @@
     Transceiver c =
       new SaslSocketTransceiver(new InetSocketAddress(s.getPort()));
     GenericRequestor requestor = new GenericRequestor(PROTOCOL, c);
-    GenericRecord params = 
+    GenericRecord params =
       new GenericData.Record(PROTOCOL.getMessages().get("hello").getRequest());
     params.put("greeting", "bob");
     Utf8 response = (Utf8)requestor.request("hello", params);
@@ -157,7 +157,7 @@
     Transceiver c = new SaslSocketTransceiver
       (new InetSocketAddress(server.getPort()), saslClient);
     GenericRequestor requestor = new GenericRequestor(PROTOCOL, c);
-    GenericRecord params = 
+    GenericRecord params =
       new GenericData.Record(PROTOCOL.getMessages().get("hello").getRequest());
     params.put("greeting", "bob");
     Utf8 response = (Utf8)requestor.request("hello", params);
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestHistogram.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestHistogram.java
index 0bdd700..9ab6eb7 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestHistogram.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestHistogram.java
@@ -47,10 +47,10 @@
     assertArrayEquals(new int[] { 1, 1, 2, 4, 8, 4 }, h.getHistogram());
 
     assertEquals("[0,1)=1;[1,2)=1;[2,4)=2;[4,8)=4;[8,16)=8;[16,infinity)=4", h.toString());
-    
+
     String[] correctBucketLabels = {
         "[0,1)", "[1,2)", "[2,4)", "[4,8)", "[8,16)", "[16,infinity)"};
-    
+
     // test bucket iterator
     int pos = 0;
     Iterator<String> it = h.getSegmenter().getBuckets();
@@ -59,7 +59,7 @@
       pos = pos + 1;
     }
     assertEquals(correctBucketLabels.length, pos);
-    
+
     List<String> labels = h.getSegmenter().getBucketLabels();
     assertEquals(correctBucketLabels.length, labels.size());
     if (labels.size() == correctBucketLabels.length) {
@@ -71,14 +71,14 @@
     String[] correctBoundryLabels = {
         "0", "1", "2", "4", "8", "16"};
     List<String> boundryLabels = h.getSegmenter().getBoundaryLabels();
-    
+
     assertEquals(correctBoundryLabels.length, boundryLabels.size());
     if (boundryLabels.size() == correctBoundryLabels.length) {
       for (int i = 0; i < boundryLabels.size(); i++) {
         assertEquals(correctBoundryLabels[i], boundryLabels.get(i));
       }
     }
-    
+
     List<Entry<String>> entries = new ArrayList<Entry<String>>();
     for (Entry<String> entry : h.entries()) {
       entries.add(entry);
@@ -86,13 +86,13 @@
     assertEquals("[0,1)", entries.get(0).bucket);
     assertEquals(4, entries.get(5).count);
     assertEquals(6, entries.size());
-    
+
     h.add(1010);
     h.add(9191);
     List<Integer> recent = h.getRecentAdditions();
     assertTrue(recent.contains(1010));
     assertTrue(recent.contains(9191));
-    
+
   }
 
   @Test(expected=Histogram.SegmenterException.class)
@@ -110,11 +110,11 @@
     public Iterator<String> getBuckets() {
       return Arrays.asList("X").iterator();
     }
-    
+
     public List<String> getBoundaryLabels() {
       return Arrays.asList("X");
     }
-    
+
     public List<String> getBucketLabels() {
       return Arrays.asList("X");
     }
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestStatsPluginAndServlet.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestStatsPluginAndServlet.java
index eb234a5..c0f9664 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestStatsPluginAndServlet.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestStatsPluginAndServlet.java
@@ -138,12 +138,12 @@
     r.addRPCPlugin(statsPlugin);
     Transceiver t = new LocalTransceiver(r);
     makeRequest(t);
-    
+
     String resp = generateServletResponse(statsPlugin);
     assertTrue(resp.contains("Average: 2.0"));
- 
+
   }
-  
+
   private RPCContext makeContext() {
     RPCContext context = new RPCContext();
     context.setMessage(message);
@@ -197,10 +197,10 @@
     avroServer.start();
 
     StatsServer ss = new StatsServer(p, 8080);
-    
+
     HttpTransceiver trans = new HttpTransceiver(
         new URL("http://localhost:" + Integer.parseInt(args[0])));
-    GenericRequestor req = new GenericRequestor(protocol, trans); 
+    GenericRequestor req = new GenericRequestor(protocol, trans);
 
     while(true) {
       Thread.sleep(1000);
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificData.java b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificData.java
index 1a05a39..4518583 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificData.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificData.java
@@ -49,7 +49,7 @@
 import org.apache.avro.generic.GenericRecord;
 
 public class TestSpecificData {
-  
+
   @Test
   /** Make sure that even with nulls, hashCode() doesn't throw NPE. */
   public void testHashCode() {
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificDatumReader.java b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificDatumReader.java
index 4b73de7..5b6cca3 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificDatumReader.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificDatumReader.java
@@ -40,7 +40,7 @@
 public class TestSpecificDatumReader {
 
   public static byte[] serializeRecord(FooBarSpecificRecord fooBarSpecificRecord) throws IOException {
-    SpecificDatumWriter<FooBarSpecificRecord> datumWriter = 
+    SpecificDatumWriter<FooBarSpecificRecord> datumWriter =
         new SpecificDatumWriter<FooBarSpecificRecord>(FooBarSpecificRecord.SCHEMA$);
     ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
     Encoder encoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
@@ -67,14 +67,14 @@
     newBuilder.setNicknames(Arrays.asList("bar"));
     newBuilder.setRelatedids(Arrays.asList(1,2,3));
     FooBarSpecificRecord specificRecord = newBuilder.build();
-    
+
     byte[] recordBytes = serializeRecord(specificRecord);
-    
+
     Decoder decoder = DecoderFactory.get().binaryDecoder(recordBytes, null);
     SpecificDatumReader<FooBarSpecificRecord> specificDatumReader = new SpecificDatumReader<FooBarSpecificRecord>(FooBarSpecificRecord.SCHEMA$);
     FooBarSpecificRecord deserialized = new FooBarSpecificRecord();
     specificDatumReader.read(deserialized, decoder);
-    
+
     assertEquals(specificRecord, deserialized);
   }
 
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificErrorBuilder.java b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificErrorBuilder.java
index 598a22a..1de8805 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificErrorBuilder.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificErrorBuilder.java
@@ -30,7 +30,7 @@
     TestError.Builder testErrorBuilder = TestError.newBuilder().
       setValue("value").setCause(new NullPointerException()).
       setMessage$("message$");
-    
+
     // Test has methods
     Assert.assertTrue(testErrorBuilder.hasValue());
     Assert.assertNotNull(testErrorBuilder.getValue());
@@ -38,23 +38,23 @@
     Assert.assertNotNull(testErrorBuilder.getCause());
     Assert.assertTrue(testErrorBuilder.hasMessage$());
     Assert.assertNotNull(testErrorBuilder.getMessage$());
-    
+
     TestError testError = testErrorBuilder.build();
     Assert.assertEquals("value", testError.getValue());
     Assert.assertEquals("value", testError.getMessage());
     Assert.assertEquals("message$", testError.getMessage$());
-    
+
     // Test copy constructor
-    Assert.assertEquals(testErrorBuilder, 
+    Assert.assertEquals(testErrorBuilder,
         TestError.newBuilder(testErrorBuilder));
     Assert.assertEquals(testErrorBuilder, TestError.newBuilder(testError));
-    
+
     TestError error = new TestError("value", new NullPointerException());
     error.setMessage$("message");
     Assert.assertEquals(error,
         TestError.newBuilder().setValue("value").
           setCause(new NullPointerException()).setMessage$("message").build());
-    
+
     // Test clear
     testErrorBuilder.clearValue();
     Assert.assertFalse(testErrorBuilder.hasValue());
@@ -66,7 +66,7 @@
     Assert.assertFalse(testErrorBuilder.hasMessage$());
     Assert.assertNull(testErrorBuilder.getMessage$());
   }
-  
+
   @Test(expected=org.apache.avro.AvroRuntimeException.class)
   public void attemptToSetNonNullableFieldToNull() {
     TestError.newBuilder().setMessage$(null);
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificRecordBuilder.java b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificRecordBuilder.java
index a94b498..7305757 100644
--- a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificRecordBuilder.java
+++ b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificRecordBuilder.java
@@ -57,7 +57,7 @@
     Assert.assertNull(builder.getFriends());
     Assert.assertFalse(builder.hasLanguages());
     Assert.assertNull(builder.getLanguages());
-    
+
     Person person = builder.build();
     Assert.assertEquals("James Gosling", person.getName().toString());
     Assert.assertEquals(new Integer(1955), person.getYearOfBirth());
@@ -69,11 +69,11 @@
     Assert.assertEquals(2, person.getLanguages().size());
     Assert.assertEquals("English", person.getLanguages().get(0).toString());
     Assert.assertEquals("Java", person.getLanguages().get(1).toString());
-    
+
     // Test copy constructors:
     Assert.assertEquals(builder, Person.newBuilder(builder));
     Assert.assertEquals(person, Person.newBuilder(person).build());
-    
+
     Person.Builder builderCopy = Person.newBuilder(person);
     Assert.assertEquals("James Gosling", builderCopy.getName().toString());
     Assert.assertEquals(new Integer(1955), builderCopy.getYearOfBirth());
@@ -81,7 +81,7 @@
     Assert.assertEquals("CA", builderCopy.getState().toString());
     Assert.assertNotNull(builderCopy.getFriends());  // friends should default to an empty list
     Assert.assertEquals(0, builderCopy.getFriends().size());
-    
+
     // Test clearing fields:
     builderCopy.clearFriends().clearCountry();
     Assert.assertFalse(builderCopy.hasFriends());
@@ -92,7 +92,7 @@
     Assert.assertNotNull(person2.getFriends());
     Assert.assertTrue(person2.getFriends().isEmpty());
   }
-  
+
   @Test
   public void testUnions() {
     long datetime = 1234L;
@@ -106,15 +106,15 @@
     Assert.assertEquals(datetime, p.getDatetime().longValue());
     Assert.assertEquals(ProductPage.class, p.getPageContext().getClass());
     Assert.assertEquals(product, ((ProductPage)p.getPageContext()).getProduct());
-    
+
     PageView p2 = PageView.newBuilder(p).build();
-    
+
     Assert.assertEquals(datetime, p2.getDatetime().longValue());
     Assert.assertEquals(ProductPage.class, p2.getPageContext().getClass());
     Assert.assertEquals(product, ((ProductPage)p2.getPageContext()).getProduct());
-    
+
     Assert.assertEquals(p, p2);
-    
+
   }
 
   @Test
@@ -136,7 +136,7 @@
         .setStringField("MyInterop")
         .setUnionField(2.71828)
         .build();
-    
+
     Interop copy = Interop.newBuilder(interop).build();
     Assert.assertEquals(interop.getArrayField().size(), copy.getArrayField().size());
     Assert.assertEquals(interop.getArrayField(), copy.getArrayField());
@@ -154,7 +154,7 @@
     Assert.assertEquals(interop.getUnionField(), copy.getUnionField());
     Assert.assertEquals(interop, copy);
   }
-  
+
   @Test(expected=org.apache.avro.AvroRuntimeException.class)
   public void attemptToSetNonNullableFieldToNull() {
     Person.newBuilder().setName(null);
@@ -202,11 +202,11 @@
     }
     long durationNanos = System.nanoTime() - startTimeNanos;
     double durationMillis = durationNanos / 1e6d;
-    System.out.println("Built " + count + " records in " + durationMillis + "ms (" + 
-        (count / (durationMillis / 1000d)) + " records/sec, " + (durationMillis / count) + 
+    System.out.println("Built " + count + " records in " + durationMillis + "ms (" +
+        (count / (durationMillis / 1000d)) + " records/sec, " + (durationMillis / count) +
         "ms/record");
   }
-  
+
   @Ignore
   @Test
   public void testBuilderPerformanceWithDefaultValues() {
@@ -217,8 +217,8 @@
     }
     long durationNanos = System.nanoTime() - startTimeNanos;
     double durationMillis = durationNanos / 1e6d;
-    System.out.println("Built " + count + " records in " + durationMillis + "ms (" + 
-        (count / (durationMillis / 1000d)) + " records/sec, " + (durationMillis / count) + 
+    System.out.println("Built " + count + " records in " + durationMillis + "ms (" +
+        (count / (durationMillis / 1000d)) + " records/sec, " + (durationMillis / count) +
         "ms/record");
   }
 
@@ -241,8 +241,8 @@
     }
     long durationNanos = System.nanoTime() - startTimeNanos;
     double durationMillis = durationNanos / 1e6d;
-    System.out.println("Built " + count + " records in " + durationMillis + "ms (" + 
-        (count / (durationMillis / 1000d)) + " records/sec, " + (durationMillis / count) + 
+    System.out.println("Built " + count + " records in " + durationMillis + "ms (" +
+        (count / (durationMillis / 1000d)) + " records/sec, " + (durationMillis / count) +
         "ms/record");
   }
 }
diff --git a/lang/java/mapred/pom.xml b/lang/java/mapred/pom.xml
index 62b5544..4e2f417 100644
--- a/lang/java/mapred/pom.xml
+++ b/lang/java/mapred/pom.xml
@@ -159,7 +159,7 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
-    <dependency>     
+    <dependency>
       <groupId>org.easymock</groupId>
       <artifactId>easymock</artifactId>
       <scope>test</scope>
@@ -184,7 +184,7 @@
       <version>${commons-codec.version}</version>
     </dependency>
   </dependencies>
-  
+
   <profiles>
      <profile>
       <id>hadoop1</id>
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/HadoopCodecFactory.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/HadoopCodecFactory.java
index 1810208..9927ef9 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/HadoopCodecFactory.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/HadoopCodecFactory.java
@@ -23,7 +23,7 @@
 import org.apache.avro.AvroRuntimeException;
 import org.apache.avro.file.CodecFactory;
 
-/**  
+/**
  * Encapsulates the ability to specify and configure an avro compression codec
  * from a given hadoop codec defined with the configuration parameter:
  * mapred.output.compression.codec
@@ -40,14 +40,14 @@
 
   private static final Map<String, String> HADOOP_AVRO_NAME_MAP =
       new HashMap<String, String>();
- 
+
   static {
     HADOOP_AVRO_NAME_MAP.put("org.apache.hadoop.io.compress.DeflateCodec", "deflate");
     HADOOP_AVRO_NAME_MAP.put("org.apache.hadoop.io.compress.SnappyCodec", "snappy");
     HADOOP_AVRO_NAME_MAP.put("org.apache.hadoop.io.compress.BZip2Codec", "bzip2");
     HADOOP_AVRO_NAME_MAP.put("org.apache.hadoop.io.compress.GZipCodec", "deflate");
   }
-  
+
   /** Maps a hadoop codec name into a CodecFactory.
   *
   * Currently there are four hadoop codecs registered:
@@ -71,7 +71,7 @@
     }
     return o;
   }
-  
+
   public static String getAvroCodecName(String hadoopCodecClass) {
     return HADOOP_AVRO_NAME_MAP.get(hadoopCodecClass);
   }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/SortedKeyValueFile.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/SortedKeyValueFile.java
index f7a41bf..6b07220 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/SortedKeyValueFile.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/SortedKeyValueFile.java
@@ -226,7 +226,7 @@
       mDataFileReader =
         new DataFileReader<GenericRecord>
         (new FsInput(dataFilePath, options.getConfiguration()), datumReader);
-      
+
     }
 
     /**
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java
index 73ab045..fd0fd8f 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java
@@ -96,7 +96,7 @@
         options.getFileSystem(), options.getConfigurationWithAvroSerialization(),
         options.getOutputPath(), options.getKeyClass(), options.getValueClass(),
         options.getBufferSizeBytes(), options.getReplicationFactor(),
-        options.getBlockSizeBytes(), 
+        options.getBlockSizeBytes(),
         options.getCompressionType(), options.getCompressionCodec(),
         options.getProgressable(), options.getMetadataWithAvroSchemas());
   }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextInputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextInputFormat.java
index ca7dab8..d0da121 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextInputFormat.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextInputFormat.java
@@ -39,7 +39,7 @@
  * <p>
  * This {@link org.apache.hadoop.mapred.InputFormat} is useful for applications
  * that wish to process Avro data using tools like MapReduce Streaming.
- * 
+ *
  * By default, when pointed at a directory, this will silently skip over any
  * files in it that do not have .avro extension. To instead include all files,
  * set the avro.mapred.ignore.inputs.without.extension property to false.
@@ -59,7 +59,7 @@
       return super.listStatus(job);
     }
   }
-  
+
   @Override
   public RecordReader<Text, Text>
     getRecordReader(InputSplit split, JobConf job, Reporter reporter)
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextRecordReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextRecordReader.java
index 517b472..2ed2a61 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextRecordReader.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextRecordReader.java
@@ -54,11 +54,11 @@
   public Text createKey() {
     return new Text();
   }
-  
+
   public Text createValue() {
     return new Text();
   }
-    
+
   public boolean next(Text key, Text ignore) throws IOException {
     if (!reader.hasNext() || reader.pastSync(end))
       return false;
@@ -80,7 +80,7 @@
     }
     return true;
   }
-  
+
   public float getProgress() throws IOException {
     if (end == start) {
       return 0.0f;
@@ -88,12 +88,12 @@
       return Math.min(1.0f, (getPos() - start) / (float)(end - start));
     }
   }
-  
+
   public long getPos() throws IOException {
     return reader.tell();
   }
 
   public void close() throws IOException { reader.close(); }
-  
+
 
 }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroInputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroInputFormat.java
index 252339a..5d8bad0 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroInputFormat.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroInputFormat.java
@@ -33,7 +33,7 @@
 
 /**
  * An {@link org.apache.hadoop.mapred.InputFormat} for Avro data files.
- * 
+ *
  * By default, when pointed at a directory, this will silently skip over any
  * files in it that do not have .avro extension. To instead include all files,
  * set the avro.mapred.ignore.inputs.without.extension property to false.
@@ -44,11 +44,11 @@
   /** Whether to silently ignore input files without the .avro extension */
   public static final String IGNORE_FILES_WITHOUT_EXTENSION_KEY =
       "avro.mapred.ignore.inputs.without.extension";
-  
+
   /** Default of whether to silently ignore input files without the .avro
    * extension. */
   public static final boolean IGNORE_INPUTS_WITHOUT_EXTENSION_DEFAULT = true;
-  
+
   @Override
   protected FileStatus[] listStatus(JobConf job) throws IOException {
     if (job.getBoolean(IGNORE_FILES_WITHOUT_EXTENSION_KEY,
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroJob.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroJob.java
index 21f130c..4e2a3c9 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroJob.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroJob.java
@@ -127,12 +127,12 @@
     setInputReflect(job);
     setMapOutputReflect(job);
   }
-  
+
   /** Indicate that a job's input data should use reflect representation.*/
   public static void setInputReflect(JobConf job) {
     job.setBoolean(INPUT_IS_REFLECT, true);
   }
-  
+
   /** Indicate that a job's map output data should use reflect representation.*/
   public static void setMapOutputReflect(JobConf job) {
     job.setBoolean(MAP_OUTPUT_IS_REFLECT, true);
@@ -202,7 +202,7 @@
   public static void setDataModelClass(JobConf job, Class<? extends GenericData> modelClass) {
     job.setClass(CONF_DATA_MODEL, modelClass, GenericData.class);
   }
-  
+
   /** Return the job's data model implementation class. */
   public static Class<? extends GenericData> getDataModelClass(Configuration conf) {
     return (Class<? extends GenericData>) conf.getClass(
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleOutputs.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleOutputs.java
index a1b4a1c..5ae03e3 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleOutputs.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleOutputs.java
@@ -42,10 +42,10 @@
 
 
 /**
- * The AvroMultipleOutputs class simplifies writing Avro output data 
+ * The AvroMultipleOutputs class simplifies writing Avro output data
  * to multiple outputs
- * 
- * <p> 
+ *
+ * <p>
  * Case one: writing to additional outputs other than the job default output.
  *
  * Each additional output, or named output, may be configured with its own
@@ -57,16 +57,16 @@
  * <p>
  * Case two: to write data to different files provided by user
  * </p>
- * 
+ *
  * <p>
- * AvroMultipleOutputs supports counters, by default they are disabled. The 
- * counters group is the {@link AvroMultipleOutputs} class name. The names of the 
- * counters are the same as the output name. These count the number of records 
+ * AvroMultipleOutputs supports counters, by default they are disabled. The
+ * counters group is the {@link AvroMultipleOutputs} class name. The names of the
+ * counters are the same as the output name. These count the number of records
  * written to each output name. For multi
  * named outputs the name of the counter is the concatenation of the named
  * output, and underscore '_' and the multiname.
  * </p>
- * 
+ *
  * Usage pattern for job submission:
  * <pre>
  *
@@ -79,7 +79,7 @@
  * job.setReducerClass(HadoopReducer.class);
  * job.set("avro.reducer",MyAvroReducer.class);
  * ...
- *  
+ *
  * Schema schema;
  * ...
  * // Defines additional single output 'avro1' for the job
@@ -98,7 +98,7 @@
  * <p>
  * Usage in Reducer:
  * <pre>
- * 
+ *
  * public class MyAvroReducer extends
  *   AvroReducer&lt;K, V, OUT&gt; {
  * private MultipleOutputs amos;
@@ -140,8 +140,8 @@
   private static final String MULTI = ".multi";
 
   private static final String COUNTERS_ENABLED = "mo.counters";
- 
- 
+
+
   /**
    * Counters group used by the counters of MultipleOutputs.
    */
@@ -444,7 +444,7 @@
       writer.close(reporter);
     }
   }
-  
+
   /**
    * Output Collector for the default schema.
    * <p/>
@@ -457,7 +457,7 @@
   public void collect(String namedOutput, Reporter reporter,Object datum) throws IOException{
     getCollector(namedOutput,reporter).collect(datum);
   }
-  
+
   /**
    * OutputCollector with custom schema.
    * <p/>
@@ -471,7 +471,7 @@
   public void collect(String namedOutput, Reporter reporter, Schema schema,Object datum) throws IOException{
     getCollector(namedOutput,reporter,schema).collect(datum);
   }
-  
+
   /**
    * OutputCollector with custom schema and file name.
    * <p/>
@@ -486,7 +486,7 @@
   public void collect(String namedOutput,Reporter reporter,Schema schema,Object datum,String baseOutputPath) throws IOException{
     getCollector(namedOutput,null,reporter,baseOutputPath,schema).collect(datum);
   }
-  
+
   /**
    * Gets the output collector for a named output.
    * <p/>
@@ -508,14 +508,14 @@
       throws IOException{
     return getCollector(namedOutput,null,reporter,namedOutput,schema);
   }
-  
+
   /**
    * Gets the output collector for a named output.
    * <p/>
    *
    * @param namedOutput the named output name
    * @param reporter    the reporter
-   * @param multiName   the multiname 
+   * @param multiName   the multiname
    * @return the output collector for the given named output
    * @throws IOException thrown if output collector could not be created
    */
@@ -530,8 +530,8 @@
       throws IOException{
     //namedOutputs.add(baseFileName);
     return getCollector(namedOutput,null,reporter,baseFileName,schema);
-  }  
-  
+  }
+
   /**
    * Gets the output collector for a multi named output.
    * <p/>
@@ -568,18 +568,18 @@
       getRecordWriter(namedOutput, baseFileName, reporter,schema);
 
     return new AvroCollector() {
-   
+
       @SuppressWarnings({"unchecked"})
       public void collect(Object key) throws IOException{
        AvroWrapper wrapper = new AvroWrapper(key);
        writer.write(wrapper, NullWritable.get());
       }
-      
+
       public void collect(Object key,Object value) throws IOException
       {
         writer.write(key,value);
-      }  
-    
+      }
+
     };
   }
 
@@ -597,7 +597,7 @@
       writer.close(null);
     }
   }
-  
+
   private static class InternalFileOutputFormat extends FileOutputFormat<Object, Object> {
    public static final String CONFIG_NAMED_OUTPUT = "mo.config.namedOutput";
 
@@ -620,7 +620,7 @@
    }
    OutputFormat outputFormat = outputConf.getOutputFormat();
    return outputFormat.getRecordWriter(fs, outputConf, fileName, arg3);
-   }   
+   }
   }
 }
 
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroOutputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroOutputFormat.java
index 2a681cd..235f768 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroOutputFormat.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroOutputFormat.java
@@ -76,16 +76,16 @@
   public static void setSyncInterval(JobConf job, int syncIntervalInBytes) {
     job.setInt(SYNC_INTERVAL_KEY, syncIntervalInBytes);
   }
-  
+
   static <T> void configureDataFileWriter(DataFileWriter<T> writer,
       JobConf job) throws UnsupportedEncodingException {
-    
+
     CodecFactory factory = getCodecFactory(job);
-    
+
     if (factory != null) {
-      writer.setCodec(factory);  
+      writer.setCodec(factory);
     }
-    
+
     writer.setSyncInterval(job.getInt(SYNC_INTERVAL_KEY, DEFAULT_SYNC_INTERVAL));
 
     // copy metadata from job
@@ -107,16 +107,16 @@
    *   <li>Use avro.output.codec if populated</li>
    *   <li>Next use mapred.output.compression.codec if populated</li>
    *   <li>If not default to Deflate Codec</li>
-   * </ul>  
+   * </ul>
    */
   static CodecFactory getCodecFactory(JobConf job) {
     CodecFactory factory = null;
-    
+
     if (FileOutputFormat.getCompressOutput(job)) {
       int deflateLevel = job.getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
       int xzLevel = job.getInt(XZ_LEVEL_KEY, DEFAULT_XZ_LEVEL);
       String codecName = job.get(AvroJob.OUTPUT_CODEC);
-      
+
       if (codecName == null) {
         String codecClassName = job.get("mapred.output.compression.codec", null);
         String avroCodecName = HadoopCodecFactory.getAvroCodecName(codecClassName);
@@ -127,7 +127,7 @@
         } else {
           return CodecFactory.deflateCodec(deflateLevel);
         }
-      } else { 
+      } else {
         if ( codecName.equals(DEFLATE_CODEC)) {
           factory = CodecFactory.deflateCodec(deflateLevel);
         } else if ( codecName.equals(XZ_CODEC)) {
@@ -137,7 +137,7 @@
         }
       }
     }
-    
+
     return factory;
   }
 
@@ -155,7 +155,7 @@
 
     final DataFileWriter<T> writer =
       new DataFileWriter<T>(dataModel.createDatumWriter(null));
-    
+
     configureDataFileWriter(writer, job);
 
     Path path = FileOutputFormat.getTaskOutputPath(job, name+EXT);
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroRecordReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroRecordReader.java
index c173d05..351d3c5 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroRecordReader.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroRecordReader.java
@@ -56,9 +56,9 @@
   public AvroWrapper<T> createKey() {
     return new AvroWrapper<T>(null);
   }
-  
+
   public NullWritable createValue() { return NullWritable.get(); }
-    
+
   public boolean next(AvroWrapper<T> wrapper, NullWritable ignore)
     throws IOException {
     if (!reader.hasNext() || reader.pastSync(end))
@@ -66,7 +66,7 @@
     wrapper.datum(reader.next(wrapper.datum()));
     return true;
   }
-  
+
   public float getProgress() throws IOException {
     if (end == start) {
       return 0.0f;
@@ -74,12 +74,12 @@
       return Math.min(1.0f, (getPos() - start) / (float)(end - start));
     }
   }
-  
+
   public long getPos() throws IOException {
     return reader.tell();
   }
 
   public void close() throws IOException { reader.close(); }
-  
+
 }
 
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroSerialization.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroSerialization.java
index 92501bf..fa8334b 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroSerialization.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroSerialization.java
@@ -38,13 +38,13 @@
 import org.apache.avro.io.EncoderFactory;
 
 /** The {@link Serialization} used by jobs configured with {@link AvroJob}. */
-public class AvroSerialization<T> extends Configured 
+public class AvroSerialization<T> extends Configured
   implements Serialization<AvroWrapper<T>> {
 
   public boolean accept(Class<?> c) {
     return AvroWrapper.class.isAssignableFrom(c);
   }
-  
+
   /** Returns the specified map output deserializer.  Defaults to the final
    * output deserializer if no map output schema was specified. */
   public Deserializer<AvroWrapper<T>> getDeserializer(Class<AvroWrapper<T>> c) {
@@ -57,7 +57,7 @@
     DatumReader<T> datumReader = dataModel.createDatumReader(schema);
     return new AvroWrapperDeserializer(datumReader, isKey);
   }
-  
+
   private static final DecoderFactory FACTORY = DecoderFactory.get();
 
   private class AvroWrapperDeserializer
@@ -66,16 +66,16 @@
     private DatumReader<T> reader;
     private BinaryDecoder decoder;
     private boolean isKey;
-    
+
     public AvroWrapperDeserializer(DatumReader<T> reader, boolean isKey) {
       this.reader = reader;
       this.isKey = isKey;
     }
-    
+
     public void open(InputStream in) {
       this.decoder = FACTORY.directBinaryDecoder(in, decoder);
     }
-    
+
     public AvroWrapper<T> deserialize(AvroWrapper<T> wrapper)
       throws IOException {
       T datum = reader.read(wrapper == null ? null : wrapper.datum(), decoder);
@@ -90,9 +90,9 @@
     public void close() throws IOException {
       decoder.inputStream().close();
     }
-    
+
   }
-  
+
   /** Returns the specified output serializer. */
   public Serializer<AvroWrapper<T>> getSerializer(Class<AvroWrapper<T>> c) {
     // AvroWrapper used for final output, AvroKey or AvroValue for map output
@@ -112,7 +112,7 @@
     private DatumWriter<T> writer;
     private OutputStream out;
     private BinaryEncoder encoder;
-    
+
     public AvroWrapperSerializer(DatumWriter<T> writer) {
       this.writer = writer;
     }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java
index ef1fae9..917e894 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java
@@ -49,7 +49,7 @@
     throws IOException {
 
     Schema schema = Schema.create(Schema.Type.BYTES);
-    
+
     final byte[] keyValueSeparator =
       job.get("mapreduce.output.textoutputformat.separator", "\t").getBytes(UTF8);
 
@@ -63,17 +63,17 @@
 
     return new AvroTextRecordWriter(writer, keyValueSeparator);
   }
-  
+
   class AvroTextRecordWriter implements RecordWriter<K, V> {
     private final DataFileWriter<ByteBuffer> writer;
     private final byte[] keyValueSeparator;
-    
+
     public AvroTextRecordWriter(DataFileWriter<ByteBuffer> writer,
         byte[] keyValueSeparator) {
       this.writer = writer;
       this.keyValueSeparator = keyValueSeparator;
     }
-    
+
     public void write(K key, V value) throws IOException {
       boolean nullKey = key == null || key instanceof NullWritable;
       boolean nullValue = value == null || value instanceof NullWritable;
@@ -87,11 +87,11 @@
         writer.append(toByteBuffer(key, keyValueSeparator, value));
       }
     }
-    
+
     public void close(Reporter reporter) throws IOException {
       writer.close();
     }
-    
+
     private ByteBuffer toByteBuffer(Object o) throws IOException {
       if (o instanceof Text) {
         Text to = (Text) o;
@@ -100,7 +100,7 @@
         return ByteBuffer.wrap(o.toString().getBytes(UTF8));
       }
     }
-    
+
     private ByteBuffer toByteBuffer(Object key, byte[] sep, Object value)
         throws IOException {
       byte[] keyBytes, valBytes;
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroUtf8InputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroUtf8InputFormat.java
index ac91109..abd8ae4 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroUtf8InputFormat.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroUtf8InputFormat.java
@@ -49,15 +49,15 @@
     RecordReader<AvroWrapper<Utf8>, NullWritable> {
 
     private LineRecordReader lineRecordReader;
-    
+
     private LongWritable currentKeyHolder = new LongWritable();
     private Text currentValueHolder = new Text();
-    
-    public Utf8LineRecordReader(Configuration job, 
+
+    public Utf8LineRecordReader(Configuration job,
         FileSplit split) throws IOException {
       this.lineRecordReader = new LineRecordReader(job, split);
     }
-    
+
     public void close() throws IOException {
       lineRecordReader.close();
     }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroWrapper.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroWrapper.java
index 12c4d9e..71112af 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroWrapper.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroWrapper.java
@@ -33,7 +33,7 @@
 
   /** Set the wrapped datum. */
   public void datum(T datum) { this.datum = datum; }
-  
+
   public int hashCode() {
     return (datum == null) ? 0 : datum.hashCode();
   }
@@ -53,7 +53,7 @@
       return false;
     return true;
   }
-    
+
   /** Get the wrapped datum as JSON. */
   @Override
   public String toString() {
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopCombiner.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopCombiner.java
index 5f914fb..2717510 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopCombiner.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopCombiner.java
@@ -41,7 +41,7 @@
     private final AvroKey<K> keyWrapper = new AvroKey<K>(null);
     private final AvroValue<V> valueWrapper = new AvroValue<V>(null);
     private OutputCollector<AvroKey<K>,AvroValue<V>> collector;
-  
+
     public PairCollector(OutputCollector<AvroKey<K>,AvroValue<V>> collector) {
       this.collector = collector;
     }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopMapper.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopMapper.java
index 35f11d6..54e1609 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopMapper.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopMapper.java
@@ -33,7 +33,7 @@
  * otherwise assumed to be pairs that are split. */
 class HadoopMapper<IN,OUT,K,V,KO,VO> extends MapReduceBase
   implements Mapper<AvroWrapper<IN>, NullWritable, KO, VO> {
-    
+
   private AvroMapper<IN,OUT> mapper;
   private MapCollector<OUT,K,V,KO,VO> out;
   private boolean isMapOnly;
@@ -48,8 +48,8 @@
   }
 
   @Override
-  public void map(AvroWrapper<IN> wrapper, NullWritable value, 
-                  OutputCollector<KO,VO> collector, 
+  public void map(AvroWrapper<IN> wrapper, NullWritable value,
+                  OutputCollector<KO,VO> collector,
                   Reporter reporter) throws IOException {
     if (this.out == null)
       this.out = new MapCollector<OUT,K,V,KO,VO>(collector, isMapOnly);
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducerBase.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducerBase.java
index 6874969..b806f76 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducerBase.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducerBase.java
@@ -29,10 +29,10 @@
 
 abstract class HadoopReducerBase<K,V,OUT,KO,VO> extends MapReduceBase
   implements Reducer<AvroKey<K>, AvroValue<V>, KO, VO> {
-  
+
   private AvroReducer<K,V,OUT> reducer;
   private AvroCollector<OUT> collector;
-  
+
   protected abstract AvroReducer<K,V,OUT> getReducer(JobConf conf);
   protected abstract AvroCollector<OUT> getCollector(OutputCollector<KO,VO> c);
 
@@ -52,9 +52,9 @@
 
   @Override
   public final void reduce(AvroKey<K> key, Iterator<AvroValue<V>> values,
-                           OutputCollector<KO, VO> out, 
+                           OutputCollector<KO, VO> out,
                            Reporter reporter) throws IOException {
-    if (this.collector == null) 
+    if (this.collector == null)
       this.collector = getCollector(out);
     reduceIterable.values = values;
     reducer.reduce(key.datum(), reduceIterable, collector, reporter);
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/Pair.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/Pair.java
index 010b08d..a33de99 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/Pair.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/Pair.java
@@ -74,7 +74,7 @@
     return pair.getField(VALUE).schema();
   }
 
-  private static final Map<Schema,Map<Schema,Schema>> SCHEMA_CACHE = 
+  private static final Map<Schema,Map<Schema,Schema>> SCHEMA_CACHE =
     new WeakHashMap<Schema,Map<Schema,Schema>>();
 
   /** Get a pair schema. */
@@ -144,7 +144,7 @@
     case 0: return key;
     case 1: return value;
     default: throw new org.apache.avro.AvroRuntimeException("Bad index: "+i);
-    } 
+    }
   }
 
   @Override @SuppressWarnings("unchecked")
@@ -153,7 +153,7 @@
     case 0: this.key = (K)o;    break;
     case 1: this.value = (V)o;  break;
     default: throw new org.apache.avro.AvroRuntimeException("Bad index: "+i);
-    } 
+    }
   }
 
   private static final Schema STRING_SCHEMA = Schema.create(Type.STRING);
@@ -510,11 +510,11 @@
   //   {"Double", "DOUBLE_SCHEMA"},
   //   {"Void", "NULL_SCHEMA"},
   // };
-  
+
   // private static String f(String pattern, String value) {
   //   return java.text.MessageFormat.format(pattern, value);
   // }
-  
+
   // public static void main(String... args) throws Exception {
   //   StringBuffer b = new StringBuffer();
   //   for (String[] k : TABLE) {
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileReader.java
index 83c9de1..36c9b61 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileReader.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileReader.java
@@ -148,7 +148,7 @@
       reader.sync(position);
     ready = false;
   }
-  
+
   @Override public boolean pastSync(long position) throws IOException {
     return reader.getPosition() >= position && reader.syncSeen();
   }
@@ -179,7 +179,7 @@
   private static class WritableData extends ReflectData {
     private static final WritableData INSTANCE = new WritableData();
     protected WritableData() {}
-    
+
     /** Return the singleton instance. */
     public static WritableData get() { return INSTANCE; }
 
@@ -194,7 +194,7 @@
   private interface Converter<T> {
     T convert(Writable o);
   }
-  
+
   private static final Map<Type,Converter> WRITABLE_CONVERTERS =
     new HashMap<Type,Converter>();
   static {
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileRecordReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileRecordReader.java
index 693f34e..bb014a3 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileRecordReader.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileRecordReader.java
@@ -31,6 +31,6 @@
     super(new SequenceFileReader<K,V>(split.getPath().toUri(), job),
           split);
   }
-  
+
 }
 
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherData.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherData.java
index 6365745..f4620ca 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherData.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherData.java
@@ -33,7 +33,7 @@
 
   /** Set the count of records in the buffer.  Used for task input only. */
   public void count(int count) { this.count = count; }
-    
+
   /** Return the buffer. */
   public ByteBuffer buffer() { return buffer; }
 
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherInputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherInputFormat.java
index de0ee26..e680324 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherInputFormat.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherInputFormat.java
@@ -36,7 +36,7 @@
 
 /**
  * An {@link org.apache.hadoop.mapred.InputFormat} for tethered Avro input.
- * 
+ *
  * By default, when pointed at a directory, this will silently skip over any
  * files in it that do not have .avro extension. To instead include all files,
  * set the avro.mapred.ignore.inputs.without.extension property to false.
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherJob.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherJob.java
index 169699f..1c16618 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherJob.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherJob.java
@@ -42,7 +42,7 @@
   public static final String TETHER_EXEC_ARGS="avro.tether.executable_args";
   public static final String TETHER_EXEC_CACHED="avro.tether.executable_cached";
   public static final String TETHER_PROTOCOL="avro.tether.protocol";
-  
+
   /** Get the URI of the application's executable. */
   public static URI getExecutable(JobConf job) {
     try {
@@ -51,15 +51,15 @@
       throw new RuntimeException(e);
     }
   }
-  
+
   /** Set the URI for the application's executable. Normally this in HDFS. */
   public static void setExecutable(JobConf job, File executable) {
     setExecutable(job,executable, new ArrayList<String>(),false);
   }
-  
+
   /**
-   * Set the URI for the application's executable (i.e the program to run in a subprocess 
-   * and provides the mapper/reducer). 
+   * Set the URI for the application's executable (i.e the program to run in a subprocess
+   * and provides the mapper/reducer).
    * @param job - Job
    * @param executable - The URI of the executable
    * @param args - List of additional arguments; Null if no arguments
@@ -114,7 +114,7 @@
     setupTetherJob(conf);
     return new JobClient(conf).submitJob(conf);
   }
-  
+
   /**
    * Determines which transport protocol (e.g http or sasl) used to communicate
    * between the parent and subprocess
@@ -147,7 +147,7 @@
 
     // set the map output key class to TetherData
     job.setMapOutputKeyClass(TetherData.class);
-    
+
     // if protocol isn't set
     if (job.getStrings(TETHER_PROTOCOL)==null) {
       job.set(TETHER_PROTOCOL, "sasl");
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeyComparator.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeyComparator.java
index f1b74b0..3ecdb46 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeyComparator.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeyComparator.java
@@ -52,7 +52,7 @@
   @Override
   public int compare(TetherData x, TetherData y) {
     ByteBuffer b1 = x.buffer(), b2 = y.buffer();
-    int diff = BinaryData.compare(b1.array(), b1.position(), 
+    int diff = BinaryData.compare(b1.array(), b1.position(),
                                   b2.array(), b2.position(),
                                   schema);
     return diff == 0 ? -1 : diff;
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeySerialization.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeySerialization.java
index b91053e..7dbd6fe 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeySerialization.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeySerialization.java
@@ -39,20 +39,20 @@
   public boolean accept(Class<?> c) {
     return TetherData.class.isAssignableFrom(c);
   }
-  
+
   public Deserializer<TetherData> getDeserializer(Class<TetherData> c) {
     return new TetherDataDeserializer();
   }
-  
+
   private static final DecoderFactory FACTORY = DecoderFactory.get();
 
   private class TetherDataDeserializer implements Deserializer<TetherData> {
     private BinaryDecoder decoder;
-    
+
     public void open(InputStream in) {
       this.decoder = FACTORY.directBinaryDecoder(in, decoder);
     }
-    
+
     public TetherData deserialize(TetherData datum) throws IOException {
       if (datum == null) datum = new TetherData();
       datum.buffer(decoder.readBytes(datum.buffer()));
@@ -63,7 +63,7 @@
       decoder.inputStream().close();
     }
   }
-  
+
   public Serializer<TetherData> getSerializer(Class<TetherData> c) {
     return new TetherDataSerializer();
   }
@@ -72,7 +72,7 @@
 
     private OutputStream out;
     private BinaryEncoder encoder;
-    
+
     public void open(OutputStream out) {
       this.out = out;
       this.encoder = EncoderFactory.get().directBinaryEncoder(out, encoder);
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherMapRunner.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherMapRunner.java
index c8b335f..04c7f20 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherMapRunner.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherMapRunner.java
@@ -56,10 +56,10 @@
       // configure it
       LOG.info("send configure to subprocess for map task");
       process.inputClient.configure
-        (TaskType.MAP, 
+        (TaskType.MAP,
          job.get(AvroJob.INPUT_SCHEMA),
          AvroJob.getMapOutputSchema(job).toString());
-         
+
       LOG.info("send partitions to subprocess for map task");
       process.inputClient.partitions(job.getNumReduceTasks());
 
@@ -91,5 +91,5 @@
         process.close();
     }
   }
-  
+
 }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputFormat.java
index 8365938..7206947 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputFormat.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputFormat.java
@@ -53,7 +53,7 @@
     throws IOException {
 
     Schema schema = AvroJob.getOutputSchema(job);
-    
+
     final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter());
 
     if (FileOutputFormat.getCompressOutput(job)) {
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherPartitioner.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherPartitioner.java
index ff0c619..eae1722 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherPartitioner.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherPartitioner.java
@@ -29,7 +29,7 @@
 import org.apache.avro.mapred.AvroJob;
 
 class TetherPartitioner implements Partitioner<TetherData, NullWritable> {
-  
+
   private static final ThreadLocal<Integer> CACHE = new ThreadLocal<Integer>();
 
   private Schema schema;
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherRecordReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherRecordReader.java
index 33c06a8..2b8240c 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherRecordReader.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherRecordReader.java
@@ -55,9 +55,9 @@
   public Schema getSchema() { return reader.getSchema(); }
 
   public TetherData createKey() { return new TetherData(); }
-  
+
   public NullWritable createValue() { return NullWritable.get(); }
-    
+
   public boolean next(TetherData data, NullWritable ignore)
     throws IOException {
     if (!reader.hasNext() || reader.pastSync(end))
@@ -66,7 +66,7 @@
     data.count((int)reader.getBlockCount());
     return true;
   }
-  
+
   public float getProgress() throws IOException {
     if (end == start) {
       return 0.0f;
@@ -74,11 +74,11 @@
       return Math.min(1.0f, (in.tell() - start) / (float)(end - start));
     }
   }
-  
+
   public long getPos() throws IOException {
     return in.tell();
   }
 
   public void close() throws IOException { reader.close(); }
-  
+
 }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherReducer.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherReducer.java
index 35b4231..0647832 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherReducer.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherReducer.java
@@ -40,7 +40,7 @@
     this.job = job;
   }
 
-  public void reduce(TetherData datum, Iterator<NullWritable> ignore, 
+  public void reduce(TetherData datum, Iterator<NullWritable> ignore,
                      OutputCollector<TetherData, NullWritable> collector,
                      Reporter reporter) throws IOException {
     try {
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetheredProcess.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetheredProcess.java
index 8ad8e8b..142905a 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetheredProcess.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetheredProcess.java
@@ -101,7 +101,7 @@
       }
 
       outputServer.start();
-      
+
       // start sub-process, connecting back to server
       this.subprocess = startSubprocess(job);
 
@@ -227,5 +227,5 @@
     builder.environment().putAll(env);
     return builder.start();
   }
-  
+
 }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyRecordWriter.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyRecordWriter.java
index ce7bc58..e403123 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyRecordWriter.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyRecordWriter.java
@@ -67,7 +67,7 @@
    */
   public AvroKeyRecordWriter(Schema writerSchema, GenericData dataModel,
       CodecFactory compressionCodec, OutputStream outputStream) throws IOException {
-    this(writerSchema, dataModel, compressionCodec, outputStream, 
+    this(writerSchema, dataModel, compressionCodec, outputStream,
         DataFileConstants.DEFAULT_SYNC_INTERVAL);
   }
 
@@ -82,10 +82,10 @@
   public void close(TaskAttemptContext context) throws IOException {
     mAvroFileWriter.close();
   }
-  
+
   /** {@inheritDoc} */
   @Override
   public long sync() throws IOException {
     return mAvroFileWriter.sync();
-  }  
+  }
 }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordWriter.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordWriter.java
index 71baa41..ee68c48 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordWriter.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordWriter.java
@@ -106,10 +106,10 @@
   public AvroKeyValueRecordWriter(AvroDatumConverter<K, ?> keyConverter,
       AvroDatumConverter<V, ?> valueConverter, GenericData dataModel,
       CodecFactory compressionCodec, OutputStream outputStream) throws IOException {
-    this(keyConverter, valueConverter, dataModel, compressionCodec, outputStream, 
+    this(keyConverter, valueConverter, dataModel, compressionCodec, outputStream,
         DataFileConstants.DEFAULT_SYNC_INTERVAL);
   }
-  
+
   /**
    * Gets the writer schema for the key/value pair generic record.
    *
@@ -137,5 +137,5 @@
   @Override
   public long sync() throws IOException {
     return mAvroFileWriter.sync();
-  }  
+  }
 }
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java
index 9db8c68..3f8d7e0 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java
@@ -41,10 +41,10 @@
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
- * The AvroMultipleOutputs class simplifies writing Avro output data 
+ * The AvroMultipleOutputs class simplifies writing Avro output data
  * to multiple outputs
- * 
- * <p> 
+ *
+ * <p>
  * Case one: writing to additional outputs other than the job default output.
  *
  * Each additional output, or named output, may be configured with its own
@@ -53,14 +53,14 @@
  * <p>
  * Case two: to write data to different files provided by user
  * </p>
- * 
+ *
  * <p>
- * AvroMultipleOutputs supports counters, by default they are disabled. The 
- * counters group is the {@link AvroMultipleOutputs} class name. The names of the 
- * counters are the same as the output name. These count the number of records 
- * written to each output name. 
+ * AvroMultipleOutputs supports counters, by default they are disabled. The
+ * counters group is the {@link AvroMultipleOutputs} class name. The names of the
+ * counters are the same as the output name. These count the number of records
+ * written to each output name.
  * </p>
- * 
+ *
  * Usage pattern for job submission:
  * <pre>
  *
@@ -72,18 +72,18 @@
  * job.setMapperClass(MyAvroMapper.class);
  * job.setReducerClass(MyAvroReducer.class);
  * ...
- *  
+ *
  * Schema schema;
  * ...
  * // Defines additional single output 'avro1' for the job
  * AvroMultipleOutputs.addNamedOutput(job, "avro1", AvroKeyValueOutputFormat.class,
- * keyschema, valueSchema);  // valueSchema can be set to null if there only Key to be written 
+ * keyschema, valueSchema);  // valueSchema can be set to null if there only Key to be written
                                    to file in the RecordWriter
  *
  * // Defines additional output 'avro2' with different schema for the job
  * AvroMultipleOutputs.addNamedOutput(job, "avro2",
  *   AvroKeyOutputFormat.class,
- *   schema,null); 
+ *   schema,null);
  * ...
  *
  * job.waitForCompletion(true);
@@ -92,7 +92,7 @@
  * <p>
  * Usage in Reducer:
  * <pre>
- * 
+ *
  * public class MyAvroReducer extends
  *   Reducer&lt;K, V, T, NullWritable&gt; {
  * private MultipleOutputs amos;
@@ -126,18 +126,18 @@
 
   private static final String MULTIPLE_OUTPUTS = "avro.mapreduce.multipleoutputs";
 
-  private static final String MO_PREFIX = 
+  private static final String MO_PREFIX =
     "avro.mapreduce.multipleoutputs.namedOutput.";
 
   private static final String FORMAT = ".format";
-  private static final String COUNTERS_ENABLED = 
+  private static final String COUNTERS_ENABLED =
     "avro.mapreduce.multipleoutputs.counters";
 
   /**
    * Counters group used by the counters of MultipleOutputs.
    */
   private static final String COUNTERS_GROUP = AvroMultipleOutputs.class.getName();
-  
+
   /**
    * Cache for the taskContexts
    */
@@ -181,7 +181,7 @@
       throw new IllegalArgumentException("output name cannot be 'part'");
     }
   }
-  
+
   /**
    * Checks if a named output name is valid.
    *
@@ -270,7 +270,7 @@
 
   /**
    * Enables or disables counters for the named outputs.
-   * 
+   *
    * The counters group is the {@link AvroMultipleOutputs} class name.
    * The names of the counters are the same as the named outputs. These
    * counters count the number records written to each output name.
@@ -287,7 +287,7 @@
    * Returns if the counters for the named outputs are enabled or not.
    * By default these counters are disabled.
    *
-   * @param job    the job 
+   * @param job    the job
    * @return TRUE if the counters are enabled, FALSE if they are disabled.
    */
   public static boolean getCountersEnabled(JobContext job) {
@@ -295,7 +295,7 @@
   }
 
   /**
-   * Wraps RecordWriter to increment counters. 
+   * Wraps RecordWriter to increment counters.
    */
   @SuppressWarnings("unchecked")
   private static class RecordWriterWithCounter extends RecordWriter {
@@ -311,13 +311,13 @@
     }
 
     @SuppressWarnings({"unchecked"})
-    public void write(Object key, Object value) 
+    public void write(Object key, Object value)
         throws IOException, InterruptedException {
       context.getCounter(COUNTERS_GROUP, counterName).increment(1);
       writer.write(key, value);
     }
 
-    public void close(TaskAttemptContext context) 
+    public void close(TaskAttemptContext context)
         throws IOException, InterruptedException {
       writer.close(context);
     }
@@ -329,7 +329,7 @@
   private Set<String> namedOutputs;
   private Map<String, RecordWriter<?, ?>> recordWriters;
   private boolean countersEnabled;
-  
+
   /**
    * Creates and initializes multiple outputs support,
    * it should be instantiated in the Mapper/Reducer setup method.
@@ -350,7 +350,7 @@
    *
    * Output path is a unique file generated for the namedOutput.
    * For example, {namedOutput}-(m|r)-{part-number}
-   * 
+   *
    * @param namedOutput the named output name
    * @param key         the key , value is NullWritable
    */
@@ -367,7 +367,7 @@
    *
    * Output path is a unique file generated for the namedOutput.
    * For example, {namedOutput}-(m|r)-{part-number}
-   * 
+   *
    * @param namedOutput the named output name
    * @param key         the key
    * @param value       the value
@@ -380,7 +380,7 @@
 
   /**
    * Write key and value to baseOutputPath using the namedOutput.
-   * 
+   *
    * @param namedOutput    the named output name
    * @param key            the key
    * @param value          the value
@@ -402,26 +402,26 @@
 
   /**
    * Write key value to an output file name.
-   * 
-   * Gets the record writer from job's output format.  
+   *
+   * Gets the record writer from job's output format.
    * Job's output format should be a FileOutputFormat.
-   * 
+   *
    * @param key       the key
    * @param value     the value
    * @param baseOutputPath base-output path to write the record to.
    * Note: Framework will generate unique filename for the baseOutputPath
    */
-  public void write(Object key, Object value, String baseOutputPath) 
+  public void write(Object key, Object value, String baseOutputPath)
       throws IOException, InterruptedException {
         write(key, value, null, null, baseOutputPath);
   }
-  
+
   /**
    * Write key value to an output file name.
-   * 
+   *
    * Gets the record writer from job's output format. Job's output format should
    * be a FileOutputFormat.
-   * 
+   *
    * @param key   the key
    * @param value the value
    * @param keySchema   keySchema to use
@@ -441,13 +441,13 @@
   }
 
   /**
-   * 
+   *
    * Gets the record writer from job's output format. Job's output format should
-   * be a FileOutputFormat.If the record writer implements Syncable then returns 
+   * be a FileOutputFormat.If the record writer implements Syncable then returns
    * the current position as a value that may be passed to DataFileReader.seek(long)
-   * otherwise returns -1. 
+   * otherwise returns -1.
    * Forces the end of the current block, emitting a synchronization marker.
-   * 
+   *
    * @param namedOutput   the namedOutput
    * @param baseOutputPath base-output path to write the record to. Note: Framework will
    *          generate unique filename for the baseOutputPath
@@ -472,12 +472,12 @@
   // MultithreadedMapper.
   @SuppressWarnings("unchecked")
   private synchronized RecordWriter getRecordWriter(
-      TaskAttemptContext taskContext, String baseFileName) 
+      TaskAttemptContext taskContext, String baseFileName)
       throws IOException, InterruptedException {
-    
+
     // look for record-writer in the cache
     RecordWriter writer = recordWriters.get(baseFileName);
-    
+
     // If not in cache, create a new one
     if (writer == null) {
       // get the record writer from context output format
@@ -490,13 +490,13 @@
       } catch (ClassNotFoundException e) {
         throw new IOException(e);
       }
- 
-      // if counters are enabled, wrap the writer with context 
-      // to increment counters 
+
+      // if counters are enabled, wrap the writer with context
+      // to increment counters
       if (countersEnabled) {
         writer = new RecordWriterWithCounter(writer, baseFileName, context);
       }
-      
+
       // add the record-writer to the cache
       recordWriters.put(baseFileName, writer);
     }
@@ -521,7 +521,7 @@
 
   }
 
-   // Create a taskAttemptContext for the named output with 
+   // Create a taskAttemptContext for the named output with
    // output format and output key/value types put in the context
   @SuppressWarnings("deprecation")
   private TaskAttemptContext getContext(String nameOutput) throws IOException {
@@ -547,13 +547,13 @@
     setSchema(job, keySchema, valSchema);
     taskContext = createTaskAttemptContext(
       job.getConfiguration(), context.getTaskAttemptID());
-    
+
     taskContexts.put(nameOutput, taskContext);
-    
+
     return taskContext;
   }
-  
-  private TaskAttemptContext createTaskAttemptContext(Configuration conf, 
+
+  private TaskAttemptContext createTaskAttemptContext(Configuration conf,
       TaskAttemptID taskId) {
     // Use reflection since the context types changed incompatibly between 1.0
     // and 2.0.
@@ -566,7 +566,7 @@
       throw new IllegalStateException(e);
     }
   }
-  
+
   private Class<?> getTaskAttemptContextClass() {
     try {
       return Class.forName(
@@ -580,14 +580,14 @@
       }
     }
   }
-  
+
   /**
    * Closes all the opened outputs.
-   * 
+   *
    * This should be called from cleanup method of map/reduce task.
    * If overridden subclasses must invoke <code>super.close()</code> at the
    * end of their <code>close()</code>
-   * 
+   *
    */
   @SuppressWarnings("unchecked")
   public void close() throws IOException, InterruptedException {
@@ -597,4 +597,4 @@
   }
 }
 
- 
+
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroOutputFormatBase.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroOutputFormatBase.java
index 5f77190..c702c9b 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroOutputFormatBase.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroOutputFormatBase.java
@@ -36,7 +36,7 @@
  * @param <V> The type of value to write.
  */
 public abstract class AvroOutputFormatBase<K, V> extends FileOutputFormat<K, V> {
-  
+
   /**
    * Gets the configured compression codec from the task context.
    *
@@ -52,7 +52,7 @@
       int xzLevel = context.getConfiguration().getInt(
               org.apache.avro.mapred.AvroOutputFormat.XZ_LEVEL_KEY,
               CodecFactory.DEFAULT_XZ_LEVEL);
-      
+
       String outputCodec = context.getConfiguration()
         .get(AvroJob.CONF_OUTPUT_CODEC);
 
@@ -72,7 +72,7 @@
         } else {
           return CodecFactory.fromString(outputCodec);
         }
-      
+
       }
 
     // No compression.
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestHadoopCodecFactory.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestHadoopCodecFactory.java
index af340d8..9132866 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestHadoopCodecFactory.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestHadoopCodecFactory.java
@@ -23,35 +23,35 @@
 import static org.junit.Assert.assertTrue;
 
 public class TestHadoopCodecFactory {
-  
+
   @Test
   public void testHadoopCodecFactoryDeflate(){
     CodecFactory hadoopDeflateCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.DeflateCodec");
     CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
     assertTrue(hadoopDeflateCodec.getClass().equals(avroDeflateCodec.getClass()));
   }
-  
+
   @Test
   public void testHadoopCodecFactorySnappy(){
     CodecFactory hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.SnappyCodec");
     CodecFactory avroSnappyCodec = CodecFactory.fromString("snappy");
     assertTrue(hadoopSnappyCodec.getClass().equals(avroSnappyCodec.getClass()));
   }
-  
+
   @Test
   public void testHadoopCodecFactoryBZip2(){
     CodecFactory hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.BZip2Codec");
     CodecFactory avroSnappyCodec = CodecFactory.fromString("bzip2");
     assertTrue(hadoopSnappyCodec.getClass().equals(avroSnappyCodec.getClass()));
   }
-  
+
   @Test
   public void testHadoopCodecFactoryGZip(){
     CodecFactory hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.GZipCodec");
     CodecFactory avroSnappyCodec = CodecFactory.fromString("deflate");
     assertTrue(hadoopSnappyCodec.getClass().equals(avroSnappyCodec.getClass()));
   }
-  
+
   @Test
   public void testHadoopCodecFactoryFail(){
     CodecFactory hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.FooCodec");
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java
index 81d35ff..9961814 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java
@@ -34,26 +34,26 @@
 import org.junit.Test;
 
 public class TestAvroInputFormat {
-  
+
   private static final String TEST_DIR = System.getProperty("test.dir", ".") +
       File.separator + TestAvroInputFormat.class.getName();
   private JobConf conf;
   private FileSystem fs;
   private Path inputDir;
-  
+
   @Before
   public void setUp() throws Exception {
     conf = new JobConf();
     fs = FileSystem.getLocal(conf);
     inputDir = new Path(TEST_DIR);
   }
-  
-  
+
+
   @After
   public void tearDown() throws Exception {
     fs.delete(inputDir, true);
   }
-  
+
   @SuppressWarnings("rawtypes")
   @Test
   public void testIgnoreFilesWithoutExtension() throws Exception {
@@ -62,15 +62,15 @@
     Path textFile = new Path(inputDir, "someotherfile.txt");
     fs.create(avroFile).close();
     fs.create(textFile).close();
-    
+
     FileInputFormat.setInputPaths(conf, inputDir);
 
-    
+
     AvroInputFormat inputFormat = new AvroInputFormat();
     FileStatus[] statuses = inputFormat.listStatus(conf);
     Assert.assertEquals(1, statuses.length);
     Assert.assertEquals("somefile.avro", statuses[0].getPath().getName());
-    
+
     conf.setBoolean(AvroInputFormat.IGNORE_FILES_WITHOUT_EXTENSION_KEY, false);
     statuses = inputFormat.listStatus(conf);
     Assert.assertEquals(2, statuses.length);
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java
index e520c87..98205ba 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java
@@ -64,22 +64,22 @@
         amos.getCollector("myavro2",reporter)
           .collect(new Pair<Utf8,Long>(new Utf8(tok),1L).toString());
       }
-        
+
     }
     public void close() throws IOException {
       amos.close();
     }
 
   }
-  
+
   public static class ReduceImpl
     extends AvroReducer<Utf8, Long, Pair<Utf8, Long> > {
     private AvroMultipleOutputs amos;
-    
+
     public void configure(JobConf Job)
     {
         amos=new AvroMultipleOutputs(Job);
-    }    
+    }
 
     @Override
     public void reduce(Utf8 word, Iterable<Long> counts,
@@ -99,7 +99,7 @@
     {
       amos.close();
     }
-  }    
+  }
 
   @Test public void runTestsInOrder() throws Exception {
     testJob();
@@ -110,70 +110,70 @@
     testJob_noreducer();
     testProjection_noreducer();
   }
-  
+
   @SuppressWarnings("deprecation")
   public void testJob() throws Exception {
     JobConf job = new JobConf();
-    
+
 //    private static final String UTF8 = "UTF-8";
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out");
-    
+
     outputPath.getFileSystem(job).delete(outputPath);
     WordCountUtil.writeLinesFile();
-    
+
     job.setJobName("AvroMultipleOutputs");
-    
+
     AvroJob.setInputSchema(job, Schema.create(Schema.Type.STRING));
     AvroJob.setOutputSchema(job,
                             new Pair<Utf8,Long>(new Utf8(""), 0L).getSchema());
-    
-    AvroJob.setMapperClass(job, MapImpl.class);        
+
+    AvroJob.setMapperClass(job, MapImpl.class);
     AvroJob.setReducerClass(job, ReduceImpl.class);
-    
+
     FileInputFormat.setInputPaths(job, new Path(dir + "/in"));
     FileOutputFormat.setOutputPath(job, outputPath);
     FileOutputFormat.setCompressOutput(job, false);
     AvroMultipleOutputs.addNamedOutput(job,"myavro",AvroOutputFormat.class, new Pair<Utf8,Long>(new Utf8(""), 0L).getSchema());
     AvroMultipleOutputs.addNamedOutput(job,"myavro1",AvroOutputFormat.class, Schema.create(Schema.Type.STRING));
-    AvroMultipleOutputs.addNamedOutput(job,"myavro2",AvroOutputFormat.class, Schema.create(Schema.Type.STRING));   
+    AvroMultipleOutputs.addNamedOutput(job,"myavro2",AvroOutputFormat.class, Schema.create(Schema.Type.STRING));
     WordCountUtil.setMeta(job);
 
 
     JobClient.runJob(job);
-    
+
     WordCountUtil.validateCountsFile();
   }
-  
+
   @SuppressWarnings("deprecation")
   public void testProjection() throws Exception {
     JobConf job = new JobConf();
-    
+
     Integer defaultRank = new Integer(-1);
-    
-    String jsonSchema = 
+
+    String jsonSchema =
       "{\"type\":\"record\"," +
       "\"name\":\"org.apache.avro.mapred.Pair\","+
-      "\"fields\": [ " + 
+      "\"fields\": [ " +
         "{\"name\":\"rank\", \"type\":\"int\", \"default\": -1}," +
-        "{\"name\":\"value\", \"type\":\"long\"}" + 
+        "{\"name\":\"value\", \"type\":\"long\"}" +
       "]}";
-    
+
     Schema readerSchema = Schema.parse(jsonSchema);
-    
+
     AvroJob.setInputSchema(job, readerSchema);
-    
+
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path inputPath = new Path(dir + "/out" + "/myavro-r-00000.avro");
     FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
     FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
 
-    
+
     AvroRecordReader<Pair<Integer, Long>> recordReader = new AvroRecordReader<Pair<Integer, Long>>(job, fileSplit);
-    
+
     AvroWrapper<Pair<Integer, Long>> inputPair = new AvroWrapper<Pair<Integer, Long>>(null);
     NullWritable ignore = NullWritable.get();
-    
+
     long sumOfCounts = 0;
     long numOfCounts = 0;
     while(recordReader.next(inputPair, ignore)) {
@@ -181,47 +181,47 @@
       sumOfCounts += (Long) inputPair.datum().get(1);
       numOfCounts++;
     }
-    
+
     Assert.assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
-    
+
     long actualSumOfCounts = 0;
     for(Long count : WordCountUtil.COUNTS.values()) {
       actualSumOfCounts += count;
     }
-    
+
     Assert.assertEquals(sumOfCounts, actualSumOfCounts);
 
   }
-  
+
   @SuppressWarnings("deprecation")
   public void testProjection_newmethods() throws Exception {
     JobConf job = new JobConf();
-    
+
     Integer defaultRank = new Integer(-1);
-    
-    String jsonSchema = 
+
+    String jsonSchema =
       "{\"type\":\"record\"," +
       "\"name\":\"org.apache.avro.mapred.Pair\","+
-      "\"fields\": [ " + 
+      "\"fields\": [ " +
         "{\"name\":\"rank\", \"type\":\"int\", \"default\": -1}," +
-        "{\"name\":\"value\", \"type\":\"long\"}" + 
+        "{\"name\":\"value\", \"type\":\"long\"}" +
       "]}";
-    
+
     Schema readerSchema = Schema.parse(jsonSchema);
-    
+
     AvroJob.setInputSchema(job, readerSchema);
-    
+
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path inputPath = new Path(dir + "/out" + "/testavrofile-r-00000.avro");
     FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
     FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
 
-    
+
     AvroRecordReader<Pair<Integer, Long>> recordReader = new AvroRecordReader<Pair<Integer, Long>>(job, fileSplit);
-    
+
     AvroWrapper<Pair<Integer, Long>> inputPair = new AvroWrapper<Pair<Integer, Long>>(null);
     NullWritable ignore = NullWritable.get();
-    
+
     long sumOfCounts = 0;
     long numOfCounts = 0;
     while(recordReader.next(inputPair, ignore)) {
@@ -229,18 +229,18 @@
       sumOfCounts += (Long) inputPair.datum().get(1);
       numOfCounts++;
     }
-    
+
     Assert.assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
-    
+
     long actualSumOfCounts = 0;
     for(Long count : WordCountUtil.COUNTS.values()) {
       actualSumOfCounts += count;
     }
-    
+
     Assert.assertEquals(sumOfCounts, actualSumOfCounts);
 
   }
-  
+
 
   @SuppressWarnings("deprecation")
   // Test for a differnt schema output
@@ -269,7 +269,7 @@
     }
     Assert.assertEquals(sumOfCounts, actualSumOfCounts);
   }
-  
+
   @SuppressWarnings("deprecation")
   // Test for a differnt schema output
   public void testProjection_newmethods_1() throws Exception {
@@ -323,7 +323,7 @@
     AvroMultipleOutputs.addNamedOutput(job,"myavro2",AvroOutputFormat.class, Schema.create(Schema.Type.STRING));
     JobClient.runJob(job);
   }
-  
+
   public void testProjection_noreducer() throws Exception {
     JobConf job = new JobConf();
     long onel = 1;
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroOutputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroOutputFormat.java
index 6de4710..6f5c0a0 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroOutputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroOutputFormat.java
@@ -35,107 +35,107 @@
     assertEquals(newSyncInterval, jobConf.getInt(
         AvroOutputFormat.SYNC_INTERVAL_KEY, -1));
   }
-  
+
   @Test
   public void testNoCodec() throws UnsupportedEncodingException {
-    
-    
+
+
     JobConf job = new JobConf();
     assertTrue(AvroOutputFormat.getCodecFactory(job) == null);
-    
+
     job = new JobConf();
     job.set("mapred.output.compress", "false");
     job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.BZip2Codec");
     assertTrue(AvroOutputFormat.getCodecFactory(job) == null);
-    
+
     job = new JobConf();
     job.set("mapred.output.compress", "false");
     job.set(AvroJob.OUTPUT_CODEC, "bzip2");
     assertTrue(AvroOutputFormat.getCodecFactory(job) == null);
   }
-  
+
   @Test
   public void testBZip2CodecUsingHadoopClass() throws UnsupportedEncodingException {
     CodecFactory avroBZip2Codec = CodecFactory.fromString("bzip2");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.BZip2Codec");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroBZip2Codec.getClass()));    
+    assertTrue(factory.getClass().equals(avroBZip2Codec.getClass()));
   }
-  
+
   @Test
   public void testBZip2CodecUsingAvroCodec() throws UnsupportedEncodingException {
     CodecFactory avroBZip2Codec = CodecFactory.fromString("bzip2");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set(AvroJob.OUTPUT_CODEC, "bzip2");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroBZip2Codec.getClass()));    
+    assertTrue(factory.getClass().equals(avroBZip2Codec.getClass()));
   }
-  
+
   @Test
   public void testDeflateCodecUsingHadoopClass() throws UnsupportedEncodingException {
     CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.DeflateCodec");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));    
+    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));
   }
-  
+
   @Test
   public void testDeflateCodecUsingAvroCodec() throws UnsupportedEncodingException {
     CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set(AvroJob.OUTPUT_CODEC, "deflate");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));    
+    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));
   }
-  
+
   @Test
   public void testSnappyCodecUsingHadoopClass() throws UnsupportedEncodingException {
     CodecFactory avroSnappyCodec = CodecFactory.fromString("snappy");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.SnappyCodec");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroSnappyCodec.getClass()));    
+    assertTrue(factory.getClass().equals(avroSnappyCodec.getClass()));
   }
-  
+
   @Test
   public void testSnappyCodecUsingAvroCodec() throws UnsupportedEncodingException {
     CodecFactory avroSnappyCodec = CodecFactory.fromString("snappy");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set(AvroJob.OUTPUT_CODEC, "snappy");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroSnappyCodec.getClass()));    
+    assertTrue(factory.getClass().equals(avroSnappyCodec.getClass()));
   }
-  
+
   @Test
   public void testGZipCodecUsingHadoopClass() throws UnsupportedEncodingException {
     CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GZipCodec");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));    
+    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));
   }
 
 
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java
index 264251a..e4743cc 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java
@@ -38,7 +38,7 @@
 import org.junit.Test;
 
 public class TestAvroTextOutputFormat {
-  
+
   private static final String UTF8 = "UTF-8";
 
   @Test
@@ -52,12 +52,12 @@
     fileWriter.create(schema, file);
     RecordWriter<Object, Object> rw = new AvroTextOutputFormat<Object, Object>()
       .new AvroTextRecordWriter(fileWriter, "\t".getBytes(UTF8));
-    
+
     rw.write(null, null);
     rw.write(null, NullWritable.get());
     rw.write(NullWritable.get(), null);
     rw.write(NullWritable.get(), NullWritable.get());
-    
+
     rw.write("k1", null);
     rw.write("k2", NullWritable.get());
 
@@ -66,7 +66,7 @@
 
     rw.write("k3", "v3");
     rw.write(new Text("k4"), new Text("v4"));
-    
+
     rw.close(null);
 
     DatumReader<ByteBuffer> reader = new GenericDatumReader<ByteBuffer>();
@@ -80,7 +80,7 @@
     assertEquals("k4\tv4", asString(fileReader.next()));
     assertFalse("End", fileReader.hasNext());
   }
-  
+
   private String asString(ByteBuffer buf) throws UnsupportedEncodingException {
     byte[] b = new byte[buf.remaining()];
     buf.get(b);
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java
index 7273bdd..17bc18d 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java
@@ -27,7 +27,7 @@
 import org.junit.Test;
 
 public class TestAvroTextSort {
-  
+
   @Test
   /**
    * Run the identity job on a "bytes" Avro file using AvroAsTextInputFormat
@@ -37,19 +37,19 @@
     JobConf job = new JobConf();
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out");
-    
+
     outputPath.getFileSystem(job).delete(outputPath);
     WordCountUtil.writeLinesBytesFile();
-    
+
     job.setInputFormat(AvroAsTextInputFormat.class);
     job.setOutputFormat(AvroTextOutputFormat.class);
     job.setOutputKeyClass(Text.class);
-    
+
     FileInputFormat.setInputPaths(job, new Path(dir + "/in"));
     FileOutputFormat.setOutputPath(job, outputPath);
-    
+
     JobClient.runJob(job);
-    
+
     WordCountUtil.validateSortedFile();
   }
 
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestGenericJob.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestGenericJob.java
index 5dcbb6c..2c61598 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestGenericJob.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestGenericJob.java
@@ -59,7 +59,7 @@
   private static Schema createSchema() {
     List<Field> fields = new ArrayList<Schema.Field>();
 
-      
+
     fields.add(new Field("Optional", createArraySchema(), "", new ArrayList<Object>()));
 
     Schema recordSchema =
@@ -73,7 +73,7 @@
     for (int i = 0; i < 5; i++) {
       schemas.add(createInnerSchema("optional_field_" + i));
     }
-        
+
     Schema unionSchema = Schema.createUnion(schemas);
     return Schema.createArray(unionSchema);
   }
@@ -96,7 +96,7 @@
     file.writeChars("aa bb cc\ndd ee ff\n");
     file.close();
   }
-    
+
   @After
     public void tearDown() throws IOException {
     FileUtil.fullyDelete(new File(dir));
@@ -106,9 +106,9 @@
     extends MapReduceBase
     implements Mapper<LongWritable, Text,
                AvroWrapper<Pair<Long, GenericData.Record>>, NullWritable> {
-      
-    public void map(LongWritable key, Text value, 
-                    OutputCollector<AvroWrapper<Pair<Long,GenericData.Record>>,NullWritable> out, 
+
+    public void map(LongWritable key, Text value,
+                    OutputCollector<AvroWrapper<Pair<Long,GenericData.Record>>,NullWritable> out,
                     Reporter reporter) throws IOException {
       GenericData.Record optional_entry =
         new GenericData.Record(createInnerSchema("optional_field_1"));
@@ -124,7 +124,7 @@
                   (new Pair<Long,GenericData.Record>(key.get(), container)),
                   NullWritable.get());
     }
-  }  
+  }
 
 
   @Test
@@ -132,10 +132,10 @@
     JobConf job = new JobConf();
     Path outputPath = new Path(dir + "/out");
     outputPath.getFileSystem(job).delete(outputPath);
-        
+
     job.setInputFormat(TextInputFormat.class);
     FileInputFormat.setInputPaths(job, dir + "/in");
-        
+
     job.setMapperClass(AvroTestConverter.class);
     job.setNumReduceTasks(0);
 
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java
index e8a63f1..1b6ab68 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java
@@ -81,7 +81,7 @@
                                                new Count(1L)));
     }
   }
-  
+
   public static class ReduceImpl
     extends AvroReducer<Text, Count, WordCount> {
     @Override
@@ -93,7 +93,7 @@
         sum += count.count;
       collector.collect(new WordCount(word.text, sum));
     }
-  }    
+  }
 
   @Test
   @SuppressWarnings("deprecation")
@@ -107,25 +107,25 @@
     inputPath.getFileSystem(job).delete(inputPath);
 
     writeLinesFile(new File(dir+"/in"));
-    
+
     job.setJobName("reflect");
-    
+
     AvroJob.setInputSchema(job, ReflectData.get().getSchema(Text.class));
     AvroJob.setMapOutputSchema
       (job, new Pair(new Text(""), new Count(0L)).getSchema());
     AvroJob.setOutputSchema(job, ReflectData.get().getSchema(WordCount.class));
-    
-    AvroJob.setMapperClass(job, MapImpl.class);        
+
+    AvroJob.setMapperClass(job, MapImpl.class);
     //AvroJob.setCombinerClass(job, ReduceImpl.class);
     AvroJob.setReducerClass(job, ReduceImpl.class);
-    
+
     FileInputFormat.setInputPaths(job, inputPath);
     FileOutputFormat.setOutputPath(job, outputPath);
 
     AvroJob.setReflect(job);                      // use reflection
 
     JobClient.runJob(job);
-    
+
     validateCountsFile(new File(new File(dir, "out"), "part-00000.avro"));
   }
 
@@ -139,7 +139,7 @@
       out.append(new Text(line));
     out.close();
   }
-  
+
   private void validateCountsFile(File file) throws Exception {
     DatumReader<WordCount> reader = new ReflectDatumReader<WordCount>();
     InputStream in = new BufferedInputStream(new FileInputStream(file));
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java
index 1a3c966..9d3bda2 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java
@@ -104,7 +104,7 @@
     Path output = new Path(System.getProperty("test.dir",".")+"/seq-out");
 
     output.getFileSystem(job).delete(output);
-    
+
     // configure input for Avro from sequence file
     AvroJob.setInputSequenceFile(job);
     FileInputFormat.setInputPaths(job, FILE.toURI().toString());
@@ -116,7 +116,7 @@
     // configure output for avro
     AvroJob.setOutputSchema(job, SCHEMA);
     FileOutputFormat.setOutputPath(job, output);
-    
+
     JobClient.runJob(job);
 
     checkFile(new DataFileReader<Pair<Long,CharSequence>>
@@ -127,9 +127,9 @@
   private static class NonAvroMapper
     extends MapReduceBase
     implements Mapper<LongWritable,Text,AvroKey<Long>,AvroValue<Utf8>> {
-    
-    public void map(LongWritable key, Text value, 
-                  OutputCollector<AvroKey<Long>,AvroValue<Utf8>> out, 
+
+    public void map(LongWritable key, Text value,
+                  OutputCollector<AvroKey<Long>,AvroValue<Utf8>> out,
                   Reporter reporter) throws IOException {
       out.collect(new AvroKey<Long>(key.get()),
                   new AvroValue<Utf8>(new Utf8(value.toString())));
@@ -142,7 +142,7 @@
     Path output = new Path(System.getProperty("test.dir",".")+"/seq-out");
 
     output.getFileSystem(job).delete(output);
-    
+
     // configure input for non-Avro sequence file
     job.setInputFormat(SequenceFileInputFormat.class);
     FileInputFormat.setInputPaths(job, FILE.toURI().toString());
@@ -166,9 +166,9 @@
   private static class NonAvroOnlyMapper
     extends MapReduceBase
     implements Mapper<LongWritable,Text,AvroWrapper<Pair<Long,Utf8>>,NullWritable> {
-    
-    public void map(LongWritable key, Text value, 
-                    OutputCollector<AvroWrapper<Pair<Long,Utf8>>,NullWritable> out, 
+
+    public void map(LongWritable key, Text value,
+                    OutputCollector<AvroWrapper<Pair<Long,Utf8>>,NullWritable> out,
                     Reporter reporter) throws IOException {
       out.collect(new AvroWrapper<Pair<Long,Utf8>>(new Pair<Long,Utf8>(key.get(), new Utf8(value.toString()))),
                   NullWritable.get());
@@ -181,7 +181,7 @@
     Path output = new Path(System.getProperty("test.dir",".")+"/seq-out");
 
     output.getFileSystem(job).delete(output);
-    
+
 
     // configure input for non-Avro sequence file
     job.setInputFormat(SequenceFileInputFormat.class);
@@ -205,9 +205,9 @@
   private static class NonAvroReducer
     extends MapReduceBase
     implements Reducer<AvroKey<Long>,AvroValue<Utf8>,LongWritable,Text> {
-    
+
     public void reduce(AvroKey<Long> key, Iterator<AvroValue<Utf8>> values,
-                       OutputCollector<LongWritable, Text> out, 
+                       OutputCollector<LongWritable, Text> out,
                        Reporter reporter) throws IOException {
       while (values.hasNext()) {
         AvroValue<Utf8> value = values.next();
@@ -223,7 +223,7 @@
     Path output = new Path(System.getProperty("test.dir",".")+"/seq-out");
 
     output.getFileSystem(job).delete(output);
-    
+
     // configure input for Avro from sequence file
     AvroJob.setInputSequenceFile(job);
     AvroJob.setInputSchema(job, SCHEMA);
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java
index c32c403..49ac89f 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java
@@ -68,18 +68,18 @@
     String inDir = System.getProperty("share.dir","../../../share")+"/test/data";
     Path input = new Path(inDir+"/weather.avro");
     Path output = new Path(System.getProperty("test.dir","target/test")+"/weather-ident");
-    
+
     output.getFileSystem(job).delete(output);
-    
+
     job.setJobName("identity map weather");
-    
+
     AvroJob.setInputSchema(job, Weather.SCHEMA$);
     AvroJob.setOutputSchema(job, Weather.SCHEMA$);
 
     FileInputFormat.setInputPaths(job, input);
     FileOutputFormat.setOutputPath(job, output);
     FileOutputFormat.setCompressOutput(job, true);
-    
+
     job.setNumReduceTasks(0);                     // map-only
 
     JobClient.runJob(job);
@@ -145,24 +145,24 @@
     String inDir = System.getProperty("share.dir","../../../share")+"/test/data";
     Path input = new Path(inDir+"/weather.avro");
     Path output = new Path(System.getProperty("test.dir","target/test")+"/weather-sort");
-    
+
     output.getFileSystem(job).delete(output);
-    
+
     job.setJobName("sort weather");
-    
+
     AvroJob.setInputSchema(job, Weather.SCHEMA$);
     AvroJob.setMapOutputSchema
       (job, Pair.getPairSchema(Weather.SCHEMA$, Schema.create(Type.NULL)));
     AvroJob.setOutputSchema(job, Weather.SCHEMA$);
-    
-    AvroJob.setMapperClass(job, SortMapper.class);        
+
+    AvroJob.setMapperClass(job, SortMapper.class);
     AvroJob.setReducerClass(job, SortReducer.class);
 
     FileInputFormat.setInputPaths(job, input);
     FileOutputFormat.setOutputPath(job, output);
     FileOutputFormat.setCompressOutput(job, true);
     AvroJob.setOutputCodec(job, SNAPPY_CODEC);
-    
+
     JobClient.runJob(job);
 
     // check output is correct
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java
index 4e729dc..a0b61cf 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java
@@ -49,7 +49,7 @@
         collector.collect(new Pair<Utf8,Long>(new Utf8(tokens.nextToken()),1L));
     }
   }
-  
+
   public static class ReduceImpl
     extends AvroReducer<Utf8, Long, Pair<Utf8, Long> > {
     @Override
@@ -61,7 +61,7 @@
         sum += count;
       collector.collect(new Pair<Utf8,Long>(word, sum));
     }
-  }    
+  }
 
   @Test public void runTestsInOrder() throws Exception {
     testJob();
@@ -73,59 +73,59 @@
     JobConf job = new JobConf();
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out");
-    
+
     outputPath.getFileSystem(job).delete(outputPath);
     WordCountUtil.writeLinesFile();
-    
+
     job.setJobName("wordcount");
-    
+
     AvroJob.setInputSchema(job, Schema.create(Schema.Type.STRING));
     AvroJob.setOutputSchema(job,
                             new Pair<Utf8,Long>(new Utf8(""), 0L).getSchema());
-    
-    AvroJob.setMapperClass(job, MapImpl.class);        
+
+    AvroJob.setMapperClass(job, MapImpl.class);
     AvroJob.setCombinerClass(job, ReduceImpl.class);
     AvroJob.setReducerClass(job, ReduceImpl.class);
-    
+
     FileInputFormat.setInputPaths(job, new Path(dir + "/in"));
     FileOutputFormat.setOutputPath(job, outputPath);
     FileOutputFormat.setCompressOutput(job, true);
-    
+
     WordCountUtil.setMeta(job);
 
     JobClient.runJob(job);
-    
+
     WordCountUtil.validateCountsFile();
   }
-  
+
   @SuppressWarnings("deprecation")
   public void testProjection() throws Exception {
     JobConf job = new JobConf();
-    
+
     Integer defaultRank = new Integer(-1);
-    
-    String jsonSchema = 
+
+    String jsonSchema =
       "{\"type\":\"record\"," +
       "\"name\":\"org.apache.avro.mapred.Pair\","+
-      "\"fields\": [ " + 
+      "\"fields\": [ " +
         "{\"name\":\"rank\", \"type\":\"int\", \"default\": -1}," +
-        "{\"name\":\"value\", \"type\":\"long\"}" + 
+        "{\"name\":\"value\", \"type\":\"long\"}" +
       "]}";
-    
+
     Schema readerSchema = Schema.parse(jsonSchema);
-    
+
     AvroJob.setInputSchema(job, readerSchema);
-    
+
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path inputPath = new Path(dir + "/out" + "/part-00000" + AvroOutputFormat.EXT);
     FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
     FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
-    
+
     AvroRecordReader<Pair<Integer, Long>> recordReader = new AvroRecordReader<Pair<Integer, Long>>(job, fileSplit);
-    
+
     AvroWrapper<Pair<Integer, Long>> inputPair = new AvroWrapper<Pair<Integer, Long>>(null);
     NullWritable ignore = NullWritable.get();
-    
+
     long sumOfCounts = 0;
     long numOfCounts = 0;
     while(recordReader.next(inputPair, ignore)) {
@@ -133,14 +133,14 @@
       sumOfCounts += (Long) inputPair.datum().get(1);
       numOfCounts++;
     }
-    
+
     Assert.assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
-    
+
     long actualSumOfCounts = 0;
     for(Long count : WordCountUtil.COUNTS.values()) {
       actualSumOfCounts += count;
     }
-    
+
     Assert.assertEquals(sumOfCounts, actualSumOfCounts);
   }
 
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/WordCountUtil.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/WordCountUtil.java
index af60f90..8d88bd3 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/WordCountUtil.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/WordCountUtil.java
@@ -104,7 +104,7 @@
       out.append(ByteBuffer.wrap(line.getBytes("UTF-8")));
     out.close();
   }
-  
+
   public static void writeLinesTextFile() throws IOException {
     FileUtil.fullyDelete(DIR);
     LINES_FILE.getParentFile().mkdirs();
@@ -130,7 +130,7 @@
     in.close();
     assertEquals(COUNTS.size(), numWords);
   }
-  
+
   public static void validateSortedFile() throws Exception {
     DatumReader<ByteBuffer> reader = new GenericDatumReader<ByteBuffer>();
     InputStream in = new BufferedInputStream(
@@ -150,12 +150,12 @@
     }
     assertFalse(lines.hasNext());
   }
-  
+
   // metadata tests
   private static final String STRING_KEY = "string-key";
   private static final String LONG_KEY = "long-key";
   private static final String BYTES_KEY = "bytes-key";
-  
+
   private static final String STRING_META_VALUE = "value";
   private static final long LONG_META_VALUE = 666;
   private static final byte[] BYTES_META_VALUE
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
index b551ef9..f3eb638 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
@@ -77,7 +77,7 @@
     private Buffer buffer = new Buffer();
     private BinaryEncoder encoder = new EncoderFactory()
         .configureBlockSize(512).binaryEncoder(buffer, null);
-    
+
     private Collector(Schema schema) {
       this.writer = new SpecificDatumWriter<T>(schema);
     }
@@ -89,7 +89,7 @@
       encoder.flush();
       outputClient.output(buffer.data());
     }
-    
+
     /** Collect a pre-partitioned map output value. */
     public void collect(T record, int partition) throws IOException {
       buffer.reset();
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCountTask.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCountTask.java
index 48cae25..58df52c 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCountTask.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCountTask.java
@@ -31,7 +31,7 @@
  * functions for word count. */
 public class WordCountTask
   extends TetherTask<Utf8,Pair<Utf8,Long>,Pair<Utf8,Long>> {
-  
+
   static final Logger LOG = LoggerFactory.getLogger(WordCountTask.class);
   @Override public void map(Utf8 text, Collector<Pair<Utf8,Long>> collector)
     throws IOException {
@@ -39,14 +39,14 @@
     while (tokens.hasMoreTokens())
       collector.collect(new Pair<Utf8,Long>(new Utf8(tokens.nextToken()),1L));
   }
-  
+
   private long sum;
 
   @Override public void reduce(Pair<Utf8,Long> wc,
                                Collector<Pair<Utf8,Long>> c) {
     sum += wc.value();
   }
-    
+
   @Override public void reduceFlush(Pair<Utf8,Long> wc, Collector<Pair<Utf8,Long>> c)
     throws IOException {
     wc.value(sum);
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
index a867f02..a2779a0 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
@@ -82,7 +82,7 @@
 
     dataFileReader.close();
   }
-  
+
   @Test
   public void testSycnableWrite() throws IOException {
     Schema writerSchema = Schema.create(Schema.Type.INT);
@@ -105,20 +105,20 @@
     verify(context);
 
     // Verify that the file was written as expected.
-	Configuration conf = new Configuration();
-	conf.set("fs.default.name", "file:///");
-	Path avroFile = new Path("target/temp.avro");
-	DataFileReader<GenericData.Record> dataFileReader = new DataFileReader<GenericData.Record>(new FsInput(avroFile,
-			conf), new SpecificDatumReader<GenericData.Record>());
+    Configuration conf = new Configuration();
+    conf.set("fs.default.name", "file:///");
+    Path avroFile = new Path("target/temp.avro");
+    DataFileReader<GenericData.Record> dataFileReader = new DataFileReader<GenericData.Record>(new FsInput(avroFile,
+      conf), new SpecificDatumReader<GenericData.Record>());
 
     dataFileReader.seek(positionTwo);
     assertTrue(dataFileReader.hasNext());  // Record 2.
     assertEquals(2, dataFileReader.next());
 
-	dataFileReader.seek(positionOne);
+    dataFileReader.seek(positionOne);
     assertTrue(dataFileReader.hasNext());  // Record 1.
     assertEquals(1, dataFileReader.next());
-    
+
     dataFileReader.close();
-  }  
+  }
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
index 1cd1ded..2de056f 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
@@ -168,7 +168,7 @@
     assertEquals("reflectionData", firstRecord.getKey().toString());
     assertEquals(record.attribute, firstRecord.getValue().attribute);
   }
- 
+
   @Test
   public void testSyncableWriteRecords() throws IOException {
     Job job = new Job();
@@ -202,14 +202,14 @@
 
     verify(context);
 
-	Configuration conf = new Configuration();
-	conf.set("fs.default.name", "file:///");
-	Path avroFile = new Path("target/temp.avro");
-	DataFileReader<GenericData.Record> avroFileReader = new DataFileReader<GenericData.Record>(new FsInput(avroFile,
-			conf), new SpecificDatumReader<GenericData.Record>());
-    
-	
-	avroFileReader.seek(pointTwo);
+    Configuration conf = new Configuration();
+    conf.set("fs.default.name", "file:///");
+    Path avroFile = new Path("target/temp.avro");
+    DataFileReader<GenericData.Record> avroFileReader = new DataFileReader<GenericData.Record>(new FsInput(avroFile,
+      conf), new SpecificDatumReader<GenericData.Record>());
+
+
+    avroFileReader.seek(pointTwo);
     // Verify that the second record was written;
     assertTrue(avroFileReader.hasNext());
     AvroKeyValue<CharSequence, TextStats> secondRecord
@@ -218,8 +218,8 @@
     assertEquals("banana", secondRecord.getKey().toString());
     assertEquals("banana", secondRecord.getValue().name.toString());
 
-    
-	avroFileReader.seek(pointOne);
+
+    avroFileReader.seek(pointOne);
     // Verify that the first record was written.
     assertTrue(avroFileReader.hasNext());
     AvroKeyValue<CharSequence, TextStats> firstRecord
@@ -231,5 +231,5 @@
 
     // That's all, folks.
     avroFileReader.close();
-  }  
+  }
 }
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java
index 83cc8ee..13c7cc1 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java
@@ -59,10 +59,10 @@
       Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
           + "\"fields\":[{\"name\":\"count\",\"type\":\"int\"},"
           + "{\"name\":\"name\",\"type\":\"string\"}]}");
-  public static final Schema STATS_SCHEMA_2 = 
+  public static final Schema STATS_SCHEMA_2 =
       Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
           + "\"fields\":[{\"name\":\"count1\",\"type\":\"int\"},"
-          + "{\"name\":\"name1\",\"type\":\"string\"}]}");  
+          + "{\"name\":\"name1\",\"type\":\"string\"}]}");
 
   private static class LineCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
     private IntWritable mOne;
@@ -122,17 +122,17 @@
       record.put("name", new Utf8(line.toString()));
       record.put("count", new Integer(sum));
       mStats.datum(record);
-      context.write(mStats, NullWritable.get()); 
+      context.write(mStats, NullWritable.get());
       amos.write("myavro",mStats,NullWritable.get());
       record2.put("name1", new Utf8(line.toString()));
       record2.put("count1", new Integer(sum));
-      mStats.datum(record2); 
+      mStats.datum(record2);
       amos.write(mStats, NullWritable.get(), STATS_SCHEMA_2, null, "testnewwrite2");
       amos.write("myavro1",mStats);
       amos.write(mStats, NullWritable.get(), STATS_SCHEMA, null, "testnewwrite");
       amos.write(mStats, NullWritable.get(), "testwritenonschema");
     }
-   
+
     @Override
     protected void cleanup(Context context) throws IOException,InterruptedException
     {
@@ -202,9 +202,9 @@
     job.setMapOutputValueClass(IntWritable.class);
 
     job.setReducerClass(GenericStatsReducer.class);
-    AvroJob.setOutputKeySchema(job, STATS_SCHEMA);    
+    AvroJob.setOutputKeySchema(job, STATS_SCHEMA);
     AvroMultipleOutputs.addNamedOutput(job,"myavro",AvroKeyOutputFormat.class,STATS_SCHEMA,null);
-    AvroMultipleOutputs.addNamedOutput(job,"myavro1", AvroKeyOutputFormat.class, STATS_SCHEMA_2); 
+    AvroMultipleOutputs.addNamedOutput(job,"myavro1", AvroKeyOutputFormat.class, STATS_SCHEMA_2);
     job.setOutputFormatClass(AvroKeyOutputFormat.class);
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out");
@@ -244,7 +244,7 @@
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-  
+
     outputFiles = fileSystem.globStatus(outputPath.suffix("/testnewwrite-r-00000.avro"));
     Assert.assertEquals(1, outputFiles.length);
     reader = new DataFileReader<GenericData.Record>(
@@ -255,11 +255,11 @@
        counts.put(((Utf8) record.get("name")).toString(), (Integer) record.get("count"));
     }
     reader.close();
-    
+
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-        
+
     outputFiles = fileSystem.globStatus(outputPath.suffix("/testnewwrite2-r-00000.avro"));
     Assert.assertEquals(1, outputFiles.length);
     reader = new DataFileReader<GenericData.Record>(
@@ -273,7 +273,7 @@
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-    
+
     outputFiles = fileSystem.globStatus(outputPath.suffix("/testwritenonschema-r-00000.avro"));
     Assert.assertEquals(1, outputFiles.length);
     reader = new DataFileReader<GenericData.Record>(
@@ -288,8 +288,8 @@
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-    
-    
+
+
   }
 
   @Test
@@ -312,7 +312,7 @@
     job.setOutputFormatClass(AvroKeyOutputFormat.class);
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out-specific");
-    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath); 
+    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath);
     FileOutputFormat.setOutputPath(job, outputPath);
 
     Assert.assertTrue(job.waitForCompletion(true));
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java
index 8298984..058f73b 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java
@@ -56,10 +56,10 @@
       Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
           + "\"fields\":[{\"name\":\"count\",\"type\":\"int\"},"
           + "{\"name\":\"name\",\"type\":\"string\"}]}");
-  public static final Schema STATS_SCHEMA_2 = 
+  public static final Schema STATS_SCHEMA_2 =
       Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
           + "\"fields\":[{\"name\":\"count1\",\"type\":\"int\"},"
-          + "{\"name\":\"name1\",\"type\":\"string\"}]}");  
+          + "{\"name\":\"name1\",\"type\":\"string\"}]}");
 
   private static class LineCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
     private IntWritable mOne;
@@ -124,14 +124,14 @@
       amos.write("myavro",mStats,NullWritable.get());
       record2.put("name1", new Utf8(line.toString()));
       record2.put("count1", new Integer(sum));
-      mStats.datum(record2); 
+      mStats.datum(record2);
       amos.write(mStats, NullWritable.get(), STATS_SCHEMA_2, null, "testnewwrite2");
       amos.sync("myavro1","myavro1");
       amos.write("myavro1",mStats);
       amos.write(mStats, NullWritable.get(), STATS_SCHEMA, null, "testnewwrite");
       amos.write(mStats, NullWritable.get(), "testwritenonschema");
     }
-   
+
     @Override
     protected void cleanup(Context context) throws IOException,InterruptedException
     {
@@ -202,9 +202,9 @@
     job.setMapOutputValueClass(IntWritable.class);
 
     job.setReducerClass(GenericStatsReducer.class);
-    AvroJob.setOutputKeySchema(job, STATS_SCHEMA);    
+    AvroJob.setOutputKeySchema(job, STATS_SCHEMA);
     AvroMultipleOutputs.addNamedOutput(job,"myavro",AvroKeyOutputFormat.class,STATS_SCHEMA,null);
-    AvroMultipleOutputs.addNamedOutput(job,"myavro1", AvroKeyOutputFormat.class, STATS_SCHEMA_2); 
+    AvroMultipleOutputs.addNamedOutput(job,"myavro1", AvroKeyOutputFormat.class, STATS_SCHEMA_2);
     job.setOutputFormatClass(AvroKeyOutputFormat.class);
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out");
@@ -244,7 +244,7 @@
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-  
+
     outputFiles = fileSystem.globStatus(outputPath.suffix("/testnewwrite-r-00000.avro"));
     Assert.assertEquals(1, outputFiles.length);
     reader = new DataFileReader<GenericData.Record>(
@@ -255,11 +255,11 @@
        counts.put(((Utf8) record.get("name")).toString(), (Integer) record.get("count"));
     }
     reader.close();
-    
+
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-        
+
     outputFiles = fileSystem.globStatus(outputPath.suffix("/testnewwrite2-r-00000.avro"));
     Assert.assertEquals(1, outputFiles.length);
     reader = new DataFileReader<GenericData.Record>(
@@ -273,7 +273,7 @@
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-    
+
     outputFiles = fileSystem.globStatus(outputPath.suffix("/testwritenonschema-r-00000.avro"));
     Assert.assertEquals(1, outputFiles.length);
     reader = new DataFileReader<GenericData.Record>(
@@ -288,8 +288,8 @@
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-    
-    
+
+
   }
 
   @Test
@@ -312,7 +312,7 @@
     job.setOutputFormatClass(AvroKeyOutputFormat.class);
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out-specific");
-    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath); 
+    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath);
     FileOutputFormat.setOutputPath(job, outputPath);
 
     Assert.assertTrue(job.waitForCompletion(true));
diff --git a/lang/java/maven-plugin/pom.xml b/lang/java/maven-plugin/pom.xml
index 93811e5..e092221 100644
--- a/lang/java/maven-plugin/pom.xml
+++ b/lang/java/maven-plugin/pom.xml
@@ -40,7 +40,7 @@
   <properties>
     <pluginTestingVersion>1.3</pluginTestingVersion>
   </properties>
-  
+
   <build>
     <plugins>
       <plugin>
diff --git a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/AbstractAvroMojo.java b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/AbstractAvroMojo.java
index a5d8b31..83db1c0 100644
--- a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/AbstractAvroMojo.java
+++ b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/AbstractAvroMojo.java
@@ -76,15 +76,15 @@
    * A list of files or directories that should be compiled first thus making
    * them importable by subsequently compiled schemas. Note that imported files
    * should not reference each other.
-   * @parameter 
+   * @parameter
    */
   protected String[] imports;
-  
+
   /**
    * A set of Ant-like exclusion patterns used to prevent certain files from
    * being processed. By default, this set is empty such that no files are
    * excluded.
-   * 
+   *
    * @parameter
    */
   protected String[] excludes = new String[0];
@@ -93,7 +93,7 @@
    * A set of Ant-like exclusion patterns used to prevent certain files from
    * being processed. By default, this set is empty such that no files are
    * excluded.
-   * 
+   *
    * @parameter
    */
   protected String[] testExcludes = new String[0];
@@ -124,7 +124,7 @@
 
   /**
    * The current Maven project.
-   * 
+   *
    * @parameter default-value="${project}"
    * @readonly
    * @required
@@ -164,11 +164,11 @@
           sourceDirectory.getAbsolutePath(), excludes, getIncludes());
       compileFiles(includedFiles, sourceDirectory, outputDirectory);
     }
-    
+
     if (hasImports || hasSourceDir) {
       project.addCompileSourceRoot(outputDirectory.getAbsolutePath());
     }
-    
+
     if (hasTestDir) {
       String[] includedFiles = getIncludedFiles(
           testSourceDirectory.getAbsolutePath(), testExcludes,
@@ -184,7 +184,7 @@
     FileSet fs = new FileSet();
     fs.setDirectory(absPath);
     fs.setFollowSymlinks(false);
-    
+
     //exclude imports directory since it has already been compiled.
     if (imports != null) {
       String importExclude = null;
diff --git a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLProtocolMojo.java b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLProtocolMojo.java
index 0e9ef9c..48f9050 100644
--- a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLProtocolMojo.java
+++ b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLProtocolMojo.java
@@ -35,7 +35,7 @@
 
 /**
  * Generate Java classes and interfaces from AvroIDL files (.avdl)
- * 
+ *
  * @goal idl-protocol
  * @requiresDependencyResolution runtime
  * @phase generate-sources
@@ -46,16 +46,16 @@
    * A set of Ant-like inclusion patterns used to select files from the source
    * directory for processing. By default, the pattern
    * <code>**&#47;*.avdl</code> is used to select IDL files.
-   * 
+   *
    * @parameter
    */
   private String[] includes = new String[] { "**/*.avdl" };
-  
+
   /**
    * A set of Ant-like inclusion patterns used to select files from the source
    * directory for processing. By default, the pattern
    * <code>**&#47;*.avdl</code> is used to select IDL files.
-   * 
+   *
    * @parameter
    */
   private String[] testIncludes = new String[] { "**/*.avdl" };
diff --git a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/ProtocolMojo.java b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/ProtocolMojo.java
index 461559b..d49ec8c 100644
--- a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/ProtocolMojo.java
+++ b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/ProtocolMojo.java
@@ -28,7 +28,7 @@
 
 /**
  * Generate Java classes and interfaces from Avro protocol files (.avpr)
- * 
+ *
  * @goal protocol
  * @phase generate-sources
  * @threadSafe
@@ -38,7 +38,7 @@
    * A set of Ant-like inclusion patterns used to select files from the source
    * directory for processing. By default, the pattern
    * <code>**&#47;*.avpr</code> is used to select grammar files.
-   * 
+   *
    * @parameter
    */
   private String[] includes = new String[] { "**/*.avpr" };
@@ -47,11 +47,11 @@
    * A set of Ant-like inclusion patterns used to select files from the source
    * directory for processing. By default, the pattern
    * <code>**&#47;*.avpr</code> is used to select grammar files.
-   * 
+   *
    * @parameter
    */
   private String[] testIncludes = new String[] { "**/*.avpr" };
-  
+
   @Override
   protected void doCompile(String filename, File sourceDirectory, File outputDirectory) throws IOException {
     File src = new File(sourceDirectory, filename);
diff --git a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/SchemaMojo.java b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/SchemaMojo.java
index 7a7eaf9..6fc3d8d 100644
--- a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/SchemaMojo.java
+++ b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/SchemaMojo.java
@@ -28,32 +28,32 @@
 
 /**
  * Generate Java classes from Avro schema files (.avsc)
- * 
+ *
  * @goal schema
  * @phase generate-sources
  * @threadSafe
  */
 public class SchemaMojo extends AbstractAvroMojo {
   /**
-   * A parser used to parse all schema files. Using a common parser will 
+   * A parser used to parse all schema files. Using a common parser will
    * facilitate the import of external schemas.
    */
    private Schema.Parser schemaParser = new Schema.Parser();
-  
+
    /**
    * A set of Ant-like inclusion patterns used to select files from the source
    * directory for processing. By default, the pattern
    * <code>**&#47;*.avsc</code> is used to select grammar files.
-   * 
+   *
    * @parameter
    */
   private String[] includes = new String[] { "**/*.avsc" };
-  
+
   /**
    * A set of Ant-like inclusion patterns used to select files from the source
    * directory for processing. By default, the pattern
    * <code>**&#47;*.avsc</code> is used to select grammar files.
-   * 
+   *
    * @parameter
    */
   private String[] testIncludes = new String[] { "**/*.avsc" };
@@ -63,7 +63,7 @@
     File src = new File(sourceDirectory, filename);
     Schema schema;
 
-    // This is necessary to maintain backward-compatibility. If there are  
+    // This is necessary to maintain backward-compatibility. If there are
     // no imported files then isolate the schemas from each other, otherwise
     // allow them to share a single schema so resuse and sharing of schema
     // is possible.
@@ -72,7 +72,7 @@
     } else {
       schema = schemaParser.parse(src);
     }
-    
+
     SpecificCompiler compiler = new SpecificCompiler(schema);
     compiler.setTemplateDir(templateDirectory);
     compiler.setStringType(StringType.valueOf(stringType));
diff --git a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/AbstractAvroMojoTest.java b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/AbstractAvroMojoTest.java
index e6f2091..2348781 100644
--- a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/AbstractAvroMojoTest.java
+++ b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/AbstractAvroMojoTest.java
@@ -41,7 +41,7 @@
 
   /**
    * Assert the existence files in the given given directory.
-   * 
+   *
    * @param directory the directory being checked
    * @param files the files whose existence is being checked.
    */
diff --git a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLProtocolMojo.java b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLProtocolMojo.java
index 6442bc8..9316872 100644
--- a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLProtocolMojo.java
+++ b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLProtocolMojo.java
@@ -21,7 +21,7 @@
 
 /**
  * Test the IDL Protocol Mojo.
- * 
+ *
  * @author saden
  */
 public class TestIDLProtocolMojo extends AbstractAvroMojoTest {
diff --git a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestProtocolMojo.java b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestProtocolMojo.java
index 95ebbc6..342c155 100644
--- a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestProtocolMojo.java
+++ b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestProtocolMojo.java
@@ -21,7 +21,7 @@
 
 /**
  * Test the Protocol Mojo.
- * 
+ *
  * @author saden
  */
 public class TestProtocolMojo extends AbstractAvroMojoTest {
diff --git a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestSchemaMojo.java b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestSchemaMojo.java
index 8a47be0..3e7fe74 100644
--- a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestSchemaMojo.java
+++ b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestSchemaMojo.java
@@ -21,7 +21,7 @@
 
 /**
  * Test the Schema Mojo.
- * 
+ *
  * @author saden
  */
 public class TestSchemaMojo extends AbstractAvroMojoTest {
diff --git a/lang/java/maven-plugin/src/test/resources/unit/idl/pom.xml b/lang/java/maven-plugin/src/test/resources/unit/idl/pom.xml
index ec6b9d8..69c504b 100644
--- a/lang/java/maven-plugin/src/test/resources/unit/idl/pom.xml
+++ b/lang/java/maven-plugin/src/test/resources/unit/idl/pom.xml
@@ -28,7 +28,7 @@
   <packaging>jar</packaging>
 
   <name>testproject</name>
- 
+
   <build>
     <plugins>
       <plugin>
@@ -59,7 +59,7 @@
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-mapper-asl</artifactId>
       <version>1.9.10</version>
-    </dependency> 
+    </dependency>
   </dependencies>
-    
+
 </project>
diff --git a/lang/java/maven-plugin/src/test/resources/unit/protocol/pom.xml b/lang/java/maven-plugin/src/test/resources/unit/protocol/pom.xml
index 341bb1a..b484e3d 100644
--- a/lang/java/maven-plugin/src/test/resources/unit/protocol/pom.xml
+++ b/lang/java/maven-plugin/src/test/resources/unit/protocol/pom.xml
@@ -28,7 +28,7 @@
   <packaging>jar</packaging>
 
   <name>testproject</name>
- 
+
   <build>
     <plugins>
       <plugin>
@@ -59,7 +59,7 @@
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-mapper-asl</artifactId>
       <version>1.9.10</version>
-    </dependency> 
+    </dependency>
   </dependencies>
-    
+
 </project>
diff --git a/lang/java/maven-plugin/src/test/resources/unit/schema/pom.xml b/lang/java/maven-plugin/src/test/resources/unit/schema/pom.xml
index 0faef29..cc000df 100644
--- a/lang/java/maven-plugin/src/test/resources/unit/schema/pom.xml
+++ b/lang/java/maven-plugin/src/test/resources/unit/schema/pom.xml
@@ -18,12 +18,12 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
-  
+
   <artifactId>avro-maven-plugin-test</artifactId>
   <packaging>jar</packaging>
 
   <name>testproject</name>
- 
+
   <build>
     <plugins>
       <plugin>
@@ -58,7 +58,7 @@
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-mapper-asl</artifactId>
       <version>1.9.10</version>
-    </dependency> 
+    </dependency>
   </dependencies>
-    
+
 </project>
diff --git a/lang/java/pom.xml b/lang/java/pom.xml
index 588a6b7..c315ea4 100644
--- a/lang/java/pom.xml
+++ b/lang/java/pom.xml
@@ -37,7 +37,7 @@
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 
     <!-- version properties for dependencies -->
-    
+
     <!--
       To build the avro-mapred module against Hadoop 1 specify
       -Dhadoop.version=1 or leave unspecified to build against Hadoop 2
@@ -387,7 +387,7 @@
         <plugins>
           <plugin>
             <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-surefire-plugin</artifactId> 
+            <artifactId>maven-surefire-plugin</artifactId>
             <configuration>
               <systemPropertyVariables>
                 <test.dir>${project.basedir}/target/</test.dir>
diff --git a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java
index dabe15a..fa8239b 100644
--- a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java
+++ b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java
@@ -58,7 +58,7 @@
   private static final ProtobufData INSTANCE = new ProtobufData();
 
   protected ProtobufData() {}
-  
+
   /** Return the singleton instance. */
   public static ProtobufData get() { return INSTANCE; }
 
@@ -108,7 +108,7 @@
     default:
       return m.getField(f);
     }
-  }    
+  }
 
   private final Map<Descriptor,FieldDescriptor[]> fieldCache =
     new ConcurrentHashMap<Descriptor,FieldDescriptor[]>();
@@ -203,7 +203,7 @@
                             false);
 
       seen.put(descriptor, result);
-        
+
       List<Field> fields = new ArrayList<Field>();
       for (FieldDescriptor f : descriptor.getFields())
         fields.add(new Field(f.getName(), getSchema(f), null, getDefault(f)));
@@ -342,7 +342,7 @@
     default:
       throw new RuntimeException("Unexpected type: "+f.getType());
     }
-    
+
   }
 
 }
diff --git a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java
index 0b9bf8a..3ae0ee9 100644
--- a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java
+++ b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java
@@ -58,7 +58,7 @@
   }
 
   @Override
-  protected Object readRecord(Object old, Schema expected, 
+  protected Object readRecord(Object old, Schema expected,
                               ResolvingDecoder in) throws IOException {
     Message.Builder b = (Message.Builder)super.readRecord(old, expected, in);
     return b.build();                             // build instance
@@ -78,7 +78,7 @@
   @Override
   protected Object readBytes(Object old, Decoder in) throws IOException {
     return ByteString.copyFrom(((ByteBuffer)super.readBytes(old, in)).array());
-  }    
+  }
 
 }
 
diff --git a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumWriter.java b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumWriter.java
index 0af30ad..58fab3b 100644
--- a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumWriter.java
+++ b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumWriter.java
@@ -36,19 +36,19 @@
   public ProtobufDatumWriter(Class<T> c) {
     super(ProtobufData.get().getSchema(c), ProtobufData.get());
   }
-  
+
   public ProtobufDatumWriter(Schema schema) {
     super(schema, ProtobufData.get());
   }
-  
+
   protected ProtobufDatumWriter(Schema root, ProtobufData protobufData) {
     super(root, protobufData);
   }
-  
+
   protected ProtobufDatumWriter(ProtobufData protobufData) {
     super(protobufData);
   }
-  
+
   @Override
   protected void writeEnum(Schema schema, Object datum, Encoder out)
     throws IOException {
@@ -61,7 +61,7 @@
 
   @Override
   protected void writeBytes(Object datum, Encoder out) throws IOException {
-    ByteString bytes = (ByteString)datum; 
+    ByteString bytes = (ByteString)datum;
     out.writeBytes(bytes.toByteArray(), 0, bytes.size());
   }
 
diff --git a/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/Test.java b/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/Test.java
index 806b5dc..e8e7a29 100644
--- a/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/Test.java
+++ b/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/Test.java
@@ -309,7 +309,7 @@
     /**
      * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
      */
-    java.util.List<org.apache.avro.protobuf.Test.Foo> 
+    java.util.List<org.apache.avro.protobuf.Test.Foo>
         getFooArrayList();
     /**
      * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
@@ -322,7 +322,7 @@
     /**
      * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
      */
-    java.util.List<? extends org.apache.avro.protobuf.Test.FooOrBuilder> 
+    java.util.List<? extends org.apache.avro.protobuf.Test.FooOrBuilder>
         getFooArrayOrBuilderList();
     /**
      * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
@@ -864,7 +864,7 @@
       if (ref instanceof java.lang.String) {
         return (java.lang.String) ref;
       } else {
-        com.google.protobuf.ByteString bs = 
+        com.google.protobuf.ByteString bs =
             (com.google.protobuf.ByteString) ref;
         java.lang.String s = bs.toStringUtf8();
         if (bs.isValidUtf8()) {
@@ -880,7 +880,7 @@
         getStringBytes() {
       java.lang.Object ref = string_;
       if (ref instanceof java.lang.String) {
-        com.google.protobuf.ByteString b = 
+        com.google.protobuf.ByteString b =
             com.google.protobuf.ByteString.copyFromUtf8(
                 (java.lang.String) ref);
         string_ = b;
@@ -969,7 +969,7 @@
     /**
      * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
      */
-    public java.util.List<? extends org.apache.avro.protobuf.Test.FooOrBuilder> 
+    public java.util.List<? extends org.apache.avro.protobuf.Test.FooOrBuilder>
         getFooArrayOrBuilderList() {
       return fooArray_;
     }
@@ -1637,7 +1637,7 @@
               fooArrayBuilder_ = null;
               fooArray_ = other.fooArray_;
               bitField0_ = (bitField0_ & ~0x00020000);
-              fooArrayBuilder_ = 
+              fooArrayBuilder_ =
                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                    getFooArrayFieldBuilder() : null;
             } else {
@@ -1664,18 +1664,18 @@
 
       public final boolean isInitialized() {
         if (!hasInt32()) {
-          
+
           return false;
         }
         for (int i = 0; i < getFooArrayCount(); i++) {
           if (!getFooArray(i).isInitialized()) {
-            
+
             return false;
           }
         }
         if (hasFoo()) {
           if (!getFoo().isInitialized()) {
-            
+
             return false;
           }
         }
@@ -2175,7 +2175,7 @@
           getStringBytes() {
         java.lang.Object ref = string_;
         if (ref instanceof String) {
-          com.google.protobuf.ByteString b = 
+          com.google.protobuf.ByteString b =
               com.google.protobuf.ByteString.copyFromUtf8(
                   (java.lang.String) ref);
           string_ = b;
@@ -2581,7 +2581,7 @@
       /**
        * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
        */
-      public java.util.List<? extends org.apache.avro.protobuf.Test.FooOrBuilder> 
+      public java.util.List<? extends org.apache.avro.protobuf.Test.FooOrBuilder>
            getFooArrayOrBuilderList() {
         if (fooArrayBuilder_ != null) {
           return fooArrayBuilder_.getMessageOrBuilderList();
@@ -2607,12 +2607,12 @@
       /**
        * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
        */
-      public java.util.List<org.apache.avro.protobuf.Test.Foo.Builder> 
+      public java.util.List<org.apache.avro.protobuf.Test.Foo.Builder>
            getFooArrayBuilderList() {
         return getFooArrayFieldBuilder().getBuilderList();
       }
       private com.google.protobuf.RepeatedFieldBuilder<
-          org.apache.avro.protobuf.Test.Foo, org.apache.avro.protobuf.Test.Foo.Builder, org.apache.avro.protobuf.Test.FooOrBuilder> 
+          org.apache.avro.protobuf.Test.Foo, org.apache.avro.protobuf.Test.Foo.Builder, org.apache.avro.protobuf.Test.FooOrBuilder>
           getFooArrayFieldBuilder() {
         if (fooArrayBuilder_ == null) {
           fooArrayBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
@@ -2838,7 +2838,7 @@
        * </pre>
        */
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.avro.protobuf.Test.Foo, org.apache.avro.protobuf.Test.Foo.Builder, org.apache.avro.protobuf.Test.FooOrBuilder> 
+          org.apache.avro.protobuf.Test.Foo, org.apache.avro.protobuf.Test.Foo.Builder, org.apache.avro.protobuf.Test.FooOrBuilder>
           getFooFieldBuilder() {
         if (fooBuilder_ == null) {
           fooBuilder_ = new com.google.protobuf.SingleFieldBuilder<
diff --git a/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/TestProtobuf.java b/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/TestProtobuf.java
index 8cdfb81..f7d5436 100644
--- a/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/TestProtobuf.java
+++ b/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/TestProtobuf.java
@@ -75,7 +75,7 @@
     Encoder e = EncoderFactory.get().binaryEncoder(bao, null);
     w.write(foo, e);
     e.flush();
-    
+
     Object o = new ProtobufDatumReader<Foo>(Foo.class).read
       (null,
        DecoderFactory.get().createBinaryDecoder
diff --git a/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftData.java b/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftData.java
index c78e25f..16047ac 100644
--- a/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftData.java
+++ b/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftData.java
@@ -55,7 +55,7 @@
   private static final ThriftData INSTANCE = new ThriftData();
 
   protected ThriftData() {}
-  
+
   /** Return the singleton instance. */
   public static ThriftData get() { return INSTANCE; }
 
diff --git a/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftDatumWriter.java b/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftDatumWriter.java
index 9ef3aed..dd01c41 100644
--- a/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftDatumWriter.java
+++ b/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftDatumWriter.java
@@ -34,15 +34,15 @@
   public ThriftDatumWriter(Class<T> c) {
     super(ThriftData.get().getSchema(c), ThriftData.get());
   }
-  
+
   public ThriftDatumWriter(Schema schema) {
     super(schema, ThriftData.get());
   }
-  
+
   protected ThriftDatumWriter(Schema root, ThriftData thriftData) {
     super(root, thriftData);
   }
-  
+
   protected ThriftDatumWriter(ThriftData thriftData) {
     super(thriftData);
   }
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/E.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/E.java
index b9dbd78..899d113 100644
--- a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/E.java
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/E.java
@@ -33,7 +33,7 @@
    * Find a the enum type by its integer value, as defined in the Thrift IDL.
    * @return null if the value is not found.
    */
-  public static E findByValue(int value) { 
+  public static E findByValue(int value) {
     switch (value) {
       case 1:
         return X;
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Error.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Error.java
index 4dfd46d..1a23f0c 100644
--- a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Error.java
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Error.java
@@ -107,7 +107,7 @@
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-    tmpMap.put(_Fields.MESSAGE, new org.apache.thrift.meta_data.FieldMetaData("message", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+    tmpMap.put(_Fields.MESSAGE, new org.apache.thrift.meta_data.FieldMetaData("message", org.apache.thrift.TFieldRequirementType.DEFAULT,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
     org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Error.class, metaDataMap);
@@ -313,7 +313,7 @@
       while (true)
       {
         schemeField = iprot.readFieldBegin();
-        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
           break;
         }
         switch (schemeField.id) {
@@ -321,7 +321,7 @@
             if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
               struct.message = iprot.readString();
               struct.setMessageIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java
index f731ccf..68adeb5 100644
--- a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java
@@ -347,7 +347,7 @@
 
       public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
         final org.apache.thrift.AsyncProcessFunction fcall = this;
-        return new AsyncMethodCallback<Void>() { 
+        return new AsyncMethodCallback<Void>() {
           public void onComplete(Void o) {
             ping_result result = new ping_result();
             try {
@@ -397,7 +397,7 @@
 
       public AsyncMethodCallback<Integer> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
         final org.apache.thrift.AsyncProcessFunction fcall = this;
-        return new AsyncMethodCallback<Integer>() { 
+        return new AsyncMethodCallback<Integer>() {
           public void onComplete(Integer o) {
             add_result result = new add_result();
             result.success = o;
@@ -449,7 +449,7 @@
 
       public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
         final org.apache.thrift.AsyncProcessFunction fcall = this;
-        return new AsyncMethodCallback<Void>() { 
+        return new AsyncMethodCallback<Void>() {
           public void onComplete(Void o) {
           }
           public void onError(Exception e) {
@@ -668,7 +668,7 @@
         while (true)
         {
           schemeField = iprot.readFieldBegin();
-          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
             break;
           }
           switch (schemeField.id) {
@@ -912,7 +912,7 @@
         while (true)
         {
           schemeField = iprot.readFieldBegin();
-          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
             break;
           }
           switch (schemeField.id) {
@@ -1039,9 +1039,9 @@
     public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
     static {
       Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-      tmpMap.put(_Fields.NUM1, new org.apache.thrift.meta_data.FieldMetaData("num1", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+      tmpMap.put(_Fields.NUM1, new org.apache.thrift.meta_data.FieldMetaData("num1", org.apache.thrift.TFieldRequirementType.DEFAULT,
           new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
-      tmpMap.put(_Fields.NUM2, new org.apache.thrift.meta_data.FieldMetaData("num2", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+      tmpMap.put(_Fields.NUM2, new org.apache.thrift.meta_data.FieldMetaData("num2", org.apache.thrift.TFieldRequirementType.DEFAULT,
           new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
       metaDataMap = Collections.unmodifiableMap(tmpMap);
       org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(add_args.class, metaDataMap);
@@ -1309,7 +1309,7 @@
         while (true)
         {
           schemeField = iprot.readFieldBegin();
-          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
             break;
           }
           switch (schemeField.id) {
@@ -1317,7 +1317,7 @@
               if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
                 struct.num1 = iprot.readI32();
                 struct.setNum1IsSet(true);
-              } else { 
+              } else {
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
               break;
@@ -1325,7 +1325,7 @@
               if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
                 struct.num2 = iprot.readI32();
                 struct.setNum2IsSet(true);
-              } else { 
+              } else {
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
               break;
@@ -1475,7 +1475,7 @@
     public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
     static {
       Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-      tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+      tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
           new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
       metaDataMap = Collections.unmodifiableMap(tmpMap);
       org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(add_result.class, metaDataMap);
@@ -1679,7 +1679,7 @@
         while (true)
         {
           schemeField = iprot.readFieldBegin();
-          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
             break;
           }
           switch (schemeField.id) {
@@ -1687,7 +1687,7 @@
               if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
                 struct.success = iprot.readI32();
                 struct.setSuccessIsSet(true);
-              } else { 
+              } else {
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
               break;
@@ -1949,7 +1949,7 @@
         while (true)
         {
           schemeField = iprot.readFieldBegin();
-          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
             break;
           }
           switch (schemeField.id) {
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/FooOrBar.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/FooOrBar.java
index 308ee45..7c56e6f 100644
--- a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/FooOrBar.java
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/FooOrBar.java
@@ -101,9 +101,9 @@
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-    tmpMap.put(_Fields.FOO, new org.apache.thrift.meta_data.FieldMetaData("foo", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+    tmpMap.put(_Fields.FOO, new org.apache.thrift.meta_data.FieldMetaData("foo", org.apache.thrift.TFieldRequirementType.DEFAULT,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.BAR, new org.apache.thrift.meta_data.FieldMetaData("bar", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+    tmpMap.put(_Fields.BAR, new org.apache.thrift.meta_data.FieldMetaData("bar", org.apache.thrift.TFieldRequirementType.DEFAULT,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
     org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(FooOrBar.class, metaDataMap);
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Nested.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Nested.java
index 482e4cc..fc10893 100644
--- a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Nested.java
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Nested.java
@@ -109,7 +109,7 @@
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-    tmpMap.put(_Fields.X, new org.apache.thrift.meta_data.FieldMetaData("x", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+    tmpMap.put(_Fields.X, new org.apache.thrift.meta_data.FieldMetaData("x", org.apache.thrift.TFieldRequirementType.DEFAULT,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
     org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Nested.class, metaDataMap);
@@ -313,7 +313,7 @@
       while (true)
       {
         schemeField = iprot.readFieldBegin();
-        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
           break;
         }
         switch (schemeField.id) {
@@ -321,7 +321,7 @@
             if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
               struct.x = iprot.readI32();
               struct.setXIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Test.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Test.java
index 65c1b3f..2ee2efc 100644
--- a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Test.java
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Test.java
@@ -91,7 +91,7 @@
     LIST_FIELD((short)10, "listField"),
     SET_FIELD((short)11, "setField"),
     /**
-     * 
+     *
      * @see E
      */
     ENUM_FIELD((short)12, "enumField"),
@@ -196,41 +196,41 @@
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-    tmpMap.put(_Fields.BOOL_FIELD, new org.apache.thrift.meta_data.FieldMetaData("boolField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+    tmpMap.put(_Fields.BOOL_FIELD, new org.apache.thrift.meta_data.FieldMetaData("boolField", org.apache.thrift.TFieldRequirementType.DEFAULT,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
-    tmpMap.put(_Fields.BYTE_FIELD, new org.apache.thrift.meta_data.FieldMetaData("byteField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+    tmpMap.put(_Fields.BYTE_FIELD, new org.apache.thrift.meta_data.FieldMetaData("byteField", org.apache.thrift.TFieldRequirementType.DEFAULT,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BYTE)));
-    tmpMap.put(_Fields.BYTE_OPTIONAL_FIELD, new org.apache.thrift.meta_data.FieldMetaData("byteOptionalField", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+    tmpMap.put(_Fields.BYTE_OPTIONAL_FIELD, new org.apache.thrift.meta_data.FieldMetaData("byteOptionalField", org.apache.thrift.TFieldRequirementType.OPTIONAL,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BYTE)));
-    tmpMap.put(_Fields.I16_FIELD, new org.apache.thrift.meta_data.FieldMetaData("i16Field", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+    tmpMap.put(_Fields.I16_FIELD, new org.apache.thrift.meta_data.FieldMetaData("i16Field", org.apache.thrift.TFieldRequirementType.DEFAULT,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16)));
-    tmpMap.put(_Fields.I16_OPTIONAL_FIELD, new org.apache.thrift.meta_data.FieldMetaData("i16OptionalField", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+    tmpMap.put(_Fields.I16_OPTIONAL_FIELD, new org.apache.thrift.meta_data.FieldMetaData("i16OptionalField", org.apache.thrift.TFieldRequirementType.OPTIONAL,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16)));
-    tmpMap.put(_Fields.I32_FIELD, new org.apache.thrift.meta_data.FieldMetaData("i32Field", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+    tmpMap.put(_Fields.I32_FIELD, new org.apache.thrift.meta_data.FieldMetaData("i32Field", org.apache.thrift.TFieldRequirementType.OPTIONAL,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
-    tmpMap.put(_Fields.I64_FIELD, new org.apache.thrift.meta_data.FieldMetaData("i64Field", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+    tmpMap.put(_Fields.I64_FIELD, new org.apache.thrift.meta_data.FieldMetaData("i64Field", org.apache.thrift.TFieldRequirementType.DEFAULT,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
-    tmpMap.put(_Fields.DOUBLE_FIELD, new org.apache.thrift.meta_data.FieldMetaData("doubleField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+    tmpMap.put(_Fields.DOUBLE_FIELD, new org.apache.thrift.meta_data.FieldMetaData("doubleField", org.apache.thrift.TFieldRequirementType.DEFAULT,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.DOUBLE)));
-    tmpMap.put(_Fields.STRING_FIELD, new org.apache.thrift.meta_data.FieldMetaData("stringField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+    tmpMap.put(_Fields.STRING_FIELD, new org.apache.thrift.meta_data.FieldMetaData("stringField", org.apache.thrift.TFieldRequirementType.DEFAULT,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
-    tmpMap.put(_Fields.BINARY_FIELD, new org.apache.thrift.meta_data.FieldMetaData("binaryField", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+    tmpMap.put(_Fields.BINARY_FIELD, new org.apache.thrift.meta_data.FieldMetaData("binaryField", org.apache.thrift.TFieldRequirementType.OPTIONAL,
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING        , true)));
-    tmpMap.put(_Fields.MAP_FIELD, new org.apache.thrift.meta_data.FieldMetaData("mapField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, 
-            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), 
+    tmpMap.put(_Fields.MAP_FIELD, new org.apache.thrift.meta_data.FieldMetaData("mapField", org.apache.thrift.TFieldRequirementType.DEFAULT,
+        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
             new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))));
-    tmpMap.put(_Fields.LIST_FIELD, new org.apache.thrift.meta_data.FieldMetaData("listField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
+    tmpMap.put(_Fields.LIST_FIELD, new org.apache.thrift.meta_data.FieldMetaData("listField", org.apache.thrift.TFieldRequirementType.DEFAULT,
+        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
             new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))));
-    tmpMap.put(_Fields.SET_FIELD, new org.apache.thrift.meta_data.FieldMetaData("setField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-        new org.apache.thrift.meta_data.SetMetaData(org.apache.thrift.protocol.TType.SET, 
+    tmpMap.put(_Fields.SET_FIELD, new org.apache.thrift.meta_data.FieldMetaData("setField", org.apache.thrift.TFieldRequirementType.DEFAULT,
+        new org.apache.thrift.meta_data.SetMetaData(org.apache.thrift.protocol.TType.SET,
             new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))));
-    tmpMap.put(_Fields.ENUM_FIELD, new org.apache.thrift.meta_data.FieldMetaData("enumField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+    tmpMap.put(_Fields.ENUM_FIELD, new org.apache.thrift.meta_data.FieldMetaData("enumField", org.apache.thrift.TFieldRequirementType.DEFAULT,
         new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, E.class)));
-    tmpMap.put(_Fields.STRUCT_FIELD, new org.apache.thrift.meta_data.FieldMetaData("structField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+    tmpMap.put(_Fields.STRUCT_FIELD, new org.apache.thrift.meta_data.FieldMetaData("structField", org.apache.thrift.TFieldRequirementType.DEFAULT,
         new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, Nested.class)));
-    tmpMap.put(_Fields.FOO_OR_BAR, new org.apache.thrift.meta_data.FieldMetaData("fooOrBar", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+    tmpMap.put(_Fields.FOO_OR_BAR, new org.apache.thrift.meta_data.FieldMetaData("fooOrBar", org.apache.thrift.TFieldRequirementType.DEFAULT,
         new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, FooOrBar.class)));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
     org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Test.class, metaDataMap);
@@ -690,7 +690,7 @@
   }
 
   /**
-   * 
+   *
    * @see E
    */
   public E getEnumField() {
@@ -698,7 +698,7 @@
   }
 
   /**
-   * 
+   *
    * @see E
    */
   public void setEnumField(E enumField) {
@@ -1496,7 +1496,7 @@
       while (true)
       {
         schemeField = iprot.readFieldBegin();
-        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
           break;
         }
         switch (schemeField.id) {
@@ -1504,7 +1504,7 @@
             if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
               struct.boolField = iprot.readBool();
               struct.setBoolFieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1512,7 +1512,7 @@
             if (schemeField.type == org.apache.thrift.protocol.TType.BYTE) {
               struct.byteField = iprot.readByte();
               struct.setByteFieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1520,7 +1520,7 @@
             if (schemeField.type == org.apache.thrift.protocol.TType.BYTE) {
               struct.byteOptionalField = iprot.readByte();
               struct.setByteOptionalFieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1528,7 +1528,7 @@
             if (schemeField.type == org.apache.thrift.protocol.TType.I16) {
               struct.i16Field = iprot.readI16();
               struct.setI16FieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1536,7 +1536,7 @@
             if (schemeField.type == org.apache.thrift.protocol.TType.I16) {
               struct.i16OptionalField = iprot.readI16();
               struct.setI16OptionalFieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1544,7 +1544,7 @@
             if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
               struct.i32Field = iprot.readI32();
               struct.setI32FieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1552,7 +1552,7 @@
             if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
               struct.i64Field = iprot.readI64();
               struct.setI64FieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1560,7 +1560,7 @@
             if (schemeField.type == org.apache.thrift.protocol.TType.DOUBLE) {
               struct.doubleField = iprot.readDouble();
               struct.setDoubleFieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1568,7 +1568,7 @@
             if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
               struct.stringField = iprot.readString();
               struct.setStringFieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1576,7 +1576,7 @@
             if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
               struct.binaryField = iprot.readBinary();
               struct.setBinaryFieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1596,7 +1596,7 @@
                 iprot.readMapEnd();
               }
               struct.setMapFieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1614,7 +1614,7 @@
                 iprot.readListEnd();
               }
               struct.setListFieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1632,7 +1632,7 @@
                 iprot.readSetEnd();
               }
               struct.setSetFieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1640,7 +1640,7 @@
             if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
               struct.enumField = E.findByValue(iprot.readI32());
               struct.setEnumFieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1649,7 +1649,7 @@
               struct.structField = new Nested();
               struct.structField.read(iprot);
               struct.setStructFieldIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
@@ -1658,7 +1658,7 @@
               struct.fooOrBar = new FooOrBar();
               struct.fooOrBar.read(iprot);
               struct.setFooOrBarIsSet(true);
-            } else { 
+            } else {
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
diff --git a/lang/java/tools/pom.xml b/lang/java/tools/pom.xml
index c1e5f69..18e0155 100644
--- a/lang/java/tools/pom.xml
+++ b/lang/java/tools/pom.xml
@@ -202,7 +202,7 @@
     </dependency>
 
     <!--For testing TetherTool we need the mapred test jar
-	because that contains the word count example.-->
+        because that contains the word count example.-->
     <dependency>
       <groupId>${project.groupId}</groupId>
       <artifactId>avro-mapred</artifactId>
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/BinaryFragmentToJsonTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/BinaryFragmentToJsonTool.java
index 14372e0..32e7999 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/BinaryFragmentToJsonTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/BinaryFragmentToJsonTool.java
@@ -47,12 +47,12 @@
         .accepts("schema-file", "File containing schema, must not occur with inline schema.")
         .withOptionalArg()
         .ofType(String.class);
-    
+
     OptionSet optionSet = optionParser.parse(args.toArray(new String[0]));
     Boolean noPretty = optionSet.has(noPrettyOption);
     List<String> nargs = (List<String>)optionSet.nonOptionArguments();
     String schemaFile = schemaFileOption.value(optionSet);
-    
+
     if (nargs.size() != (schemaFile == null ? 2 : 1)) {
       err.println("fragtojson --no-pretty --schema-file <file> [inline-schema] input-file");
       err.println("   converts Avro fragments to JSON.");
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/CatTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/CatTool.java
index 1fd1087..162a6de 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/CatTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/CatTool.java
@@ -40,17 +40,17 @@
 
 /** Tool to extract samples from an Avro data file. */
 public class CatTool implements Tool {
-  
+
   private long totalCopied;
   private double sampleCounter;
-  
+
   private GenericRecord reuse;
   private DataFileStream<GenericRecord> reader;
   private DataFileWriter<GenericRecord> writer;
   private Schema schema;
   private List<Path> inFiles;
   private int currentInput;
-  
+
   @Override
   public int run(InputStream in, PrintStream out, PrintStream err,
       List<String> args) throws Exception {
@@ -64,12 +64,12 @@
       .accepts("limit", "maximum number of records in the outputfile")
       .withRequiredArg()
       .ofType(Long.class)
-      .defaultsTo(Long.MAX_VALUE); 
+      .defaultsTo(Long.MAX_VALUE);
     OptionSpec<Double> fracOpt = optParser
       .accepts("samplerate", "rate at which records will be collected")
       .withRequiredArg()
       .ofType(Double.class)
-      .defaultsTo(new Double(1)); 
+      .defaultsTo(new Double(1));
 
     OptionSet opts = optParser.parse(args.toArray(new String[0]));
     List<String> nargs = (List<String>)opts.nonOptionArguments();
@@ -77,7 +77,7 @@
       printHelp(out);
       return 0;
     }
-    
+
     inFiles = Util.getFiles(nargs.subList(0, nargs.size()-1));
 
     System.out.println("List of input files:");
@@ -86,7 +86,7 @@
     }
     currentInput = -1;
     nextInput();
-   
+
     OutputStream output = out;
     String lastArg = nargs.get(nargs.size()-1);
     if (nargs.size() > 1 && !lastArg.equals("-")) {
@@ -94,7 +94,7 @@
     }
     writer = new DataFileWriter<GenericRecord>(
         new GenericDatumWriter<GenericRecord>());
-    
+
     String codecName = reader.getMetaString(DataFileConstants.CODEC);
     CodecFactory codec = (codecName == null)
         ? CodecFactory.fromString(DataFileConstants.NULL_CODEC)
@@ -106,14 +106,14 @@
       }
     }
     writer.create(schema, output);
-    
+
     long  offset = opts.valueOf(offsetOpt);
     long limit = opts.valueOf(limitOpt);
     double samplerate = opts.valueOf(fracOpt);
     sampleCounter = 1;
     totalCopied = 0;
     reuse = null;
-    
+
     if (limit < 0) {
       System.out.println("limit has to be non-negative");
       this.printHelp(out);
@@ -133,16 +133,16 @@
     skip(offset);
     writeRecords(limit, samplerate);
     System.out.println(totalCopied + " records written.");
-  
+
     writer.flush();
     writer.close();
     Util.close(out);
     return 0;
   }
-  
+
   private void nextInput() throws IOException{
     currentInput++;
-    Path path = inFiles.get(currentInput); 
+    Path path = inFiles.get(currentInput);
     FSDataInputStream input = new FSDataInputStream(Util.openFromFS(path));
     reader = new DataFileStream<GenericRecord>(input, new GenericDatumReader<GenericRecord>());
     if (schema == null) {                            // if this is the first file, the schema gets saved
@@ -152,11 +152,11 @@
       throw new IOException("schemas dont match");
     }
   }
-  
+
   private boolean hasNextInput() {
     return inFiles.size() > (currentInput + 1);
   }
-  
+
   /**skips a number of records from the input*/
   private long skip(long skip) throws IOException {
     long skipped = 0;
@@ -171,7 +171,7 @@
     }
   return skipped;
 }
-  
+
   /** writes records with the given samplerate
    * The record at position offset is guaranteed to be taken*/
   private long writeRecords(long count, double samplerate) throws IOException {
@@ -188,11 +188,11 @@
     totalCopied = totalCopied + written;
     if (written < count && hasNextInput()) { // goto next file
       nextInput();
-      written = written + writeRecords(count - written, samplerate);  
+      written = written + writeRecords(count - written, samplerate);
     }
     return written;
   }
-  
+
   private void printHelp(PrintStream out) {
     out.println("cat --offset <offset> --limit <limit> --samplerate <samplerate> [input-files...] output-file");
     out.println();
@@ -212,5 +212,5 @@
   public String getShortDescription() {
     return "extracts samples from files";
   }
-  
+
 }
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetMetaTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetMetaTool.java
index bc9c6f3..4711d8f 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetMetaTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetMetaTool.java
@@ -45,7 +45,7 @@
   @Override
   public int run(InputStream stdin, PrintStream out, PrintStream err,
       List<String> args) throws Exception {
-    
+
     OptionParser p = new OptionParser();
     OptionSpec<String> keyOption =
         p.accepts("key", "Metadata key")
@@ -53,7 +53,7 @@
         .ofType(String.class);
     OptionSet opts = p.parse(args.toArray(new String[0]));
     String keyName = keyOption.value(opts);
-    
+
     List<String> nargs = (List<String>)opts.nonOptionArguments();
     if (nargs.size() != 1) {
       err.println("Expected 1 arg: input_file");
@@ -81,7 +81,7 @@
     }
     return 0;
   }
-  
+
   // escape TAB, NL and CR in keys, so that output can be reliably parsed
   static String escapeKey(String key) {
     key = key.replace("\\","\\\\");               // escape backslashes first
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileRepairTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileRepairTool.java
index 0fdd8e5..c3d31bc 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileRepairTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileRepairTool.java
@@ -45,7 +45,7 @@
   public String getShortDescription() {
     return "Recovers data from a corrupt Avro Data file";
   }
-  
+
   private void printInfo(PrintStream output) {
     output.println("Insufficient arguments.  Arguments:  [-o option] "
         + "input_file output_file \n"
@@ -101,7 +101,7 @@
       if (args.size() - index < 1) {
         printInfo(err);
         return 1;
-      } 
+      }
     }
     if (ALL.equals(option)) {
       return recoverAll(input, args.get(index), out, err);
@@ -151,7 +151,7 @@
         } catch (Exception e) {
           e.printStackTrace(err);
           return 1;
-        } 
+        }
       } else {
         return innerRecover(fileReader, null, out, err, recoverPrior,
             recoverAfter, null, null);
@@ -236,7 +236,7 @@
                     + (pos));
                 if (lastRecordWasBad) {
                   // consecutive bad record
-                  err.println("Second consecutive bad record in block: " + numBlocks 
+                  err.println("Second consecutive bad record in block: " + numBlocks
                       + ". Skipping remainder of block. ");
                   numCorruptRecords += blockRemaining;
                   badRecordsInBlock += blockRemaining;
@@ -272,7 +272,7 @@
             e2.printStackTrace(err);
             return 1;
           }
-        } 
+        }
       }
     } finally {
       if (fileWritten) {
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileWriteTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileWriteTool.java
index a8c85e0..bce948e 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileWriteTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileWriteTool.java
@@ -82,7 +82,7 @@
     Schema schema = (schemafile != null)
         ? Util.parseSchemaFromFS(schemafile)
         : new Schema.Parser().parse(schemastr);
-    
+
     DatumReader<Object> reader = new GenericDatumReader<Object>(schema);
 
     InputStream input = Util.fileOrStdin(nargs.get(0), stdin);
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/FromTextTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/FromTextTool.java
index 115155e..0026a24 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/FromTextTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/FromTextTool.java
@@ -34,13 +34,13 @@
 import org.apache.avro.generic.GenericDatumWriter;
 
 /** Reads a text file into an Avro data file.
- * 
+ *
  * Can accept a file name, and HDFS file URI, or stdin. Can write to a file
  * name, an HDFS URI, or stdout.*/
 public class FromTextTool implements Tool {
-  private static final String TEXT_FILE_SCHEMA = 
+  private static final String TEXT_FILE_SCHEMA =
     "\"bytes\"";
-  
+
   @Override
   public String getName() {
     return "fromtext";
@@ -54,7 +54,7 @@
   @Override
   public int run(InputStream stdin, PrintStream out, PrintStream err,
       List<String> args) throws Exception {
-    
+
     OptionParser p = new OptionParser();
     OptionSpec<Integer> level = Util.compressionLevelOption(p);
     OptionSpec<String> codec = Util.compressionCodecOption(p);
@@ -68,12 +68,12 @@
       p.printHelpOn(err);
       return 1;
     }
- 
+
     CodecFactory codecFactory = Util.codecFactory(opts, codec, level);
-  
+
     BufferedInputStream inStream = Util.fileOrStdin(nargs.get(0), stdin);
     BufferedOutputStream outStream = Util.fileOrStdout(nargs.get(1), out);
-    
+
     DataFileWriter<ByteBuffer> writer =
         new DataFileWriter<ByteBuffer>(new GenericDatumWriter<ByteBuffer>());
     writer.setCodec(codecFactory);
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/IdlTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/IdlTool.java
index 48352e3..f3cfe24 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/IdlTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/IdlTool.java
@@ -57,7 +57,7 @@
     } else {
       parser = new Idl(in);
     }
-    
+
     if (args.size() == 2 && ! "-".equals(args.get(1))) {
       parseOut = new PrintStream(new FileOutputStream(args.get(1)));
     }
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/JsonToBinaryFragmentTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/JsonToBinaryFragmentTool.java
index ba56cea..1232d2a 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/JsonToBinaryFragmentTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/JsonToBinaryFragmentTool.java
@@ -44,11 +44,11 @@
         .accepts("schema-file", "File containing schema, must not occur with inline schema.")
         .withOptionalArg()
         .ofType(String.class);
-    
+
     OptionSet optionSet = optionParser.parse(args.toArray(new String[0]));
     List<String> nargs = (List<String>)optionSet.nonOptionArguments();
     String schemaFile = schemaFileOption.value(optionSet);
-    
+
     if (nargs.size() != (schemaFile == null ? 2 : 1)) {
       err.println("jsontofrag --schema-file <file> [inline-schema] input-file");
       err.println("   converts JSON to Avro fragments.");
@@ -68,12 +68,12 @@
     InputStream input = Util.fileOrStdin(inputFile, stdin);
 
     try {
-      GenericDatumReader<Object> reader = 
+      GenericDatumReader<Object> reader =
           new GenericDatumReader<Object>(schema);
-    
-      JsonDecoder jsonDecoder = 
+
+      JsonDecoder jsonDecoder =
       DecoderFactory.get().jsonDecoder(schema, input);
-      GenericDatumWriter<Object> writer = 
+      GenericDatumWriter<Object> writer =
           new GenericDatumWriter<Object>(schema);
       Encoder e = EncoderFactory.get().binaryEncoder(out, null);
       Object datum = null;
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/RpcReceiveTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/RpcReceiveTool.java
index 29eb738..aebca55 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/RpcReceiveTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/RpcReceiveTool.java
@@ -60,7 +60,7 @@
   public String getShortDescription() {
     return "Opens an RPC Server and listens for one message.";
   }
-  
+
   private class SinkResponder extends GenericResponder {
 
     public SinkResponder(Protocol local) {
@@ -71,7 +71,7 @@
     public Object respond(Message message, Object request)
     throws AvroRemoteException {
       if (!message.equals(expectedMessage)) {
-        out.println(String.format("Expected message '%s' but received '%s'.", 
+        out.println(String.format("Expected message '%s' but received '%s'.",
             expectedMessage.getName(), message.getName()));
         latch.countDown();
         throw new IllegalArgumentException("Unexpected message.");
@@ -102,7 +102,7 @@
       return response;
     }
   }
-  
+
   @Override
   public int run(InputStream in, PrintStream out, PrintStream err,
       List<String> args) throws Exception {
@@ -154,16 +154,16 @@
       err.println("One of -data or -file must be specified.");
       return 1;
     }
-    
+
     this.out = out;
-    
+
     latch = new CountDownLatch(1);
     server = Ipc.createServer(new SinkResponder(protocol), uri);
     server.start();
     out.println("Port: " + server.getPort());
     return 0;
   }
-  
+
   int run2(PrintStream err) throws InterruptedException {
     latch.await();
     err.println("Closing server.");
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/RpcSendTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/RpcSendTool.java
index 04e729c..20a6801 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/RpcSendTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/RpcSendTool.java
@@ -85,7 +85,7 @@
           messageName, protocol));
       return 1;
     }
-    
+
     Object datum;
     if (data.value(opts) != null) {
       datum = Util.jsonToGenericDatum(message.getRequest(), data.value(opts));
@@ -103,7 +103,7 @@
     return 0;
   }
 
-  private void dumpJson(PrintStream out, Schema schema, Object datum) 
+  private void dumpJson(PrintStream out, Schema schema, Object datum)
   throws IOException {
     DatumWriter<Object> writer = new GenericDatumWriter<Object>(schema);
     JsonGenerator g =
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/ToTextTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/ToTextTool.java
index 3eebf3b..a36d54c 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/ToTextTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/ToTextTool.java
@@ -33,11 +33,11 @@
 
 /** Reads an avro data file into a plain text file. */
 public class ToTextTool implements Tool {
-  private static final String TEXT_FILE_SCHEMA = 
+  private static final String TEXT_FILE_SCHEMA =
         "\"bytes\"";
-  private static final byte[] LINE_SEPARATOR = 
+  private static final byte[] LINE_SEPARATOR =
         System.getProperty("line.separator").getBytes();
-    
+
   @Override
   public String getName() {
     return "totext";
@@ -51,7 +51,7 @@
   @Override
   public int run(InputStream stdin, PrintStream out, PrintStream err,
       List<String> args) throws Exception {
-      
+
     OptionParser p = new OptionParser();
     OptionSet opts = p.parse(args.toArray(new String[0]));
     if (opts.nonOptionArguments().size() != 2) {
@@ -73,7 +73,7 @@
       fileReader.close();
       return 1;
     }
-    
+
     while (fileReader.hasNext()) {
       ByteBuffer outBuff = (ByteBuffer) fileReader.next();
       outStream.write(outBuff.array());
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java
index 361e6f6..bbd6fe5 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java
@@ -64,7 +64,7 @@
       err.println("Usage: [-pretty] input");
       return 1;
     }
-    
+
     dump(TrevniUtil.input(filename), out, pretty);
 
     return 0;
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniToJsonTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniToJsonTool.java
index 7ae815a..ae629a2 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniToJsonTool.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniToJsonTool.java
@@ -68,7 +68,7 @@
       err.println("Usage: [-pretty] input");
       return 1;
     }
-    
+
     toJson(TrevniUtil.input(filename), out, pretty);
 
     return 0;
@@ -109,7 +109,7 @@
     out.println();
     reader.close();
   }
-  
+
   private void valueToJson(ColumnMetaData column) throws IOException {
     generator.writeFieldName(shortNames[column.getNumber()]);
     ColumnValues in = values[column.getNumber()];
@@ -138,7 +138,7 @@
     }
   }
 
-  private void primitiveToJson(ColumnMetaData column, Object value) 
+  private void primitiveToJson(ColumnMetaData column, Object value)
     throws IOException {
     switch (column.getType()) {
     case NULL:
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniUtil.java b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniUtil.java
index 9b49957..0158d1b 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniUtil.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniUtil.java
@@ -45,11 +45,11 @@
       return new InputFile(new File(filename));
     }
   }
-  
+
   /**
    * Returns stdin if filename is "-", else opens the local or HDFS file
    * and returns an InputStream for it.
-   * @throws IOException 
+   * @throws IOException
    */
   static InputStream input(String filename, InputStream stdin)
     throws IOException {
@@ -62,13 +62,13 @@
       return new BufferedInputStream(new FileInputStream(new File(filename)));
     }
   }
-  
+
   /**
    * Returns stdout if filename is "-", else opens the local or HDFS file
    * and returns an OutputStream for it.
-   * @throws IOException 
+   * @throws IOException
    */
-  static OutputStream output(String filename, OutputStream stdout) 
+  static OutputStream output(String filename, OutputStream stdout)
     throws IOException {
     if (filename.equals("-"))
       return new BufferedOutputStream(stdout);
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java b/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java
index 22213af..708bb41 100644
--- a/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java
@@ -51,65 +51,65 @@
   /**
    * Returns stdin if filename is "-", else opens the File in the owning filesystem
    * and returns an InputStream for it.
-   * Relative paths will be opened in the default filesystem. 
+   * Relative paths will be opened in the default filesystem.
    * @param filename The filename to be opened
-   * @throws IOException 
+   * @throws IOException
    */
-  static BufferedInputStream fileOrStdin(String filename, InputStream stdin) 
+  static BufferedInputStream fileOrStdin(String filename, InputStream stdin)
       throws IOException {
     return new BufferedInputStream(filename.equals("-")
         ? stdin
-        : openFromFS(filename));  
+        : openFromFS(filename));
   }
-  
+
   /**
    * Returns stdout if filename is "-", else opens the file from the owning filesystem
    * and returns an OutputStream for it.
-   * Relative paths will be opened in the default filesystem.  
+   * Relative paths will be opened in the default filesystem.
    * @param filename The filename to be opened
-   * @throws IOException 
+   * @throws IOException
    */
-  static BufferedOutputStream fileOrStdout(String filename, OutputStream stdout) 
+  static BufferedOutputStream fileOrStdout(String filename, OutputStream stdout)
       throws IOException {
     return new BufferedOutputStream(filename.equals("-")
         ? stdout
         : createFromFS(filename));
   }
-  
+
   /**
    * Returns an InputStream for the file using the owning filesystem,
    * or the default if none is given.
    * @param filename The filename to be opened
-   * @throws IOException 
+   * @throws IOException
    */
-  static InputStream openFromFS(String filename) 
+  static InputStream openFromFS(String filename)
       throws IOException {
     Path p = new Path(filename);
     return p.getFileSystem(new Configuration()).open(p);
   }
-  
+
   /**
    * Returns an InputStream for the file using the owning filesystem,
    * or the default if none is given.
    * @param filename The filename to be opened
-   * @throws IOException 
+   * @throws IOException
    */
-  static InputStream openFromFS(Path filename) 
+  static InputStream openFromFS(Path filename)
       throws IOException {
     return filename.getFileSystem(new Configuration()).open(filename);
   }
-  
+
   /**
-   * Returns a seekable FsInput using the owning filesystem, 
+   * Returns a seekable FsInput using the owning filesystem,
    * or the default if none is given.
    * @param filename The filename to be opened
-   * @throws IOException 
+   * @throws IOException
    */
-  static FsInput openSeekableFromFS(String filename) 
-      throws IOException {       
+  static FsInput openSeekableFromFS(String filename)
+      throws IOException {
     return new FsInput(new Path(filename), new Configuration());
   }
-  
+
   /**
    * Opens the file for writing in the owning filesystem,
    * or the default if none is given.
@@ -117,14 +117,14 @@
    * @return An OutputStream to the specified file.
    * @throws IOException
    */
-  static OutputStream createFromFS(String filename) 
+  static OutputStream createFromFS(String filename)
       throws IOException {
     Path p = new Path(filename);
     return new BufferedOutputStream(p.getFileSystem(new Configuration()).create(p));
   }
-  
+
   /**
-   * Closes the inputstream created from {@link Util.fileOrStdin} 
+   * Closes the inputstream created from {@link Util.fileOrStdin}
    * unless it is System.in.
    * @param in The inputstream to be closed.
    */
@@ -137,7 +137,7 @@
       }
     }
   }
-  
+
   /**
    * Closes the outputstream created from {@link Util.fileOrStdout}
    * unless it is System.out.
@@ -152,7 +152,7 @@
       }
     }
   }
-  
+
   /**
    * Parses a schema from the specified file.
    * @param filename The file name to parse
@@ -172,18 +172,18 @@
    * if pathname is a directory, this method returns a list of Pathes to all the files within
    * this directory.
    * Only files inside that directory are included, no subdirectories or files in subdirectories
-   * will be added. 
+   * will be added.
    * The List is sorted alphabetically.
    * @param fileOrDirName filename or directoryname
-   * @return A Path List 
+   * @return A Path List
    * @throws IOException
    */
-  static List<Path> getFiles(String fileOrDirName) 
+  static List<Path> getFiles(String fileOrDirName)
     throws IOException {
-    List<Path> pathList = new ArrayList<Path>();  
+    List<Path> pathList = new ArrayList<Path>();
     Path path = new Path(fileOrDirName);
     FileSystem fs = path.getFileSystem(new Configuration());
-    
+
     if (fs.isFile(path)) {
       pathList.add(path);
     }
@@ -197,16 +197,16 @@
     Collections.sort(pathList);
     return pathList;
   }
-  
+
   /**
    * This method returns a list which contains a path to every given file
    * in the input and a path to every file inside a given directory.
    * The list is sorted alphabetically and contains no subdirectories or files within those.
    * @param fileOrDirNames A list of filenames and directorynames
-   * @return A list of Pathes, one for each file 
+   * @return A list of Pathes, one for each file
    * @throws IOException
    */
-  static List<Path> getFiles(List<String> fileOrDirNames) 
+  static List<Path> getFiles(List<String> fileOrDirNames)
       throws IOException {
     ArrayList<Path> pathList = new ArrayList<Path>();
     for(String name : fileOrDirNames) {
@@ -215,11 +215,11 @@
     Collections.sort(pathList);
     return pathList;
   }
-  
-  /** 
+
+  /**
    * Converts a String JSON object into a generic datum.
-   * 
-   * This is inefficient (creates extra objects), so should be used 
+   *
+   * This is inefficient (creates extra objects), so should be used
    * sparingly.
    */
   static Object jsonToGenericDatum(Schema schema, String jsonData)
diff --git a/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Player.java b/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Player.java
index 9d7fbc8..07a3483 100644
--- a/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Player.java
+++ b/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Player.java
@@ -1,9 +1,9 @@
 /**
  * Autogenerated by Avro
- * 
+ *
  * DO NOT EDIT DIRECTLY
  */
-package avro.examples.baseball;  
+package avro.examples.baseball;
 
 import org.apache.avro.specific.SpecificData;
 
@@ -23,7 +23,7 @@
   /**
    * Default constructor.  Note that this does not initialize fields
    * to their default values from the schema.  If that is desired then
-   * one should use <code>newBuilder()</code>. 
+   * one should use <code>newBuilder()</code>.
    */
   public Player() {}
 
@@ -42,7 +42,7 @@
   }
 
   public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter.  Applications should not call. 
+  // Used by DatumWriter.  Applications should not call.
   public java.lang.Object get(int field$) {
     switch (field$) {
     case 0: return number;
@@ -52,7 +52,7 @@
     default: throw new org.apache.avro.AvroRuntimeException("Bad index");
     }
   }
-  // Used by DatumReader.  Applications should not call. 
+  // Used by DatumReader.  Applications should not call.
   @SuppressWarnings(value="unchecked")
   public void put(int field$, java.lang.Object value$) {
     switch (field$) {
@@ -136,7 +136,7 @@
   public static avro.examples.baseball.Player.Builder newBuilder() {
     return new avro.examples.baseball.Player.Builder();
   }
-  
+
   /**
    * Creates a new Player RecordBuilder by copying an existing Builder.
    * @param other The existing builder to copy.
@@ -145,7 +145,7 @@
   public static avro.examples.baseball.Player.Builder newBuilder(avro.examples.baseball.Player.Builder other) {
     return new avro.examples.baseball.Player.Builder(other);
   }
-  
+
   /**
    * Creates a new Player RecordBuilder by copying an existing Player instance.
    * @param other The existing instance to copy.
@@ -154,7 +154,7 @@
   public static avro.examples.baseball.Player.Builder newBuilder(avro.examples.baseball.Player other) {
     return new avro.examples.baseball.Player.Builder(other);
   }
-  
+
   /**
    * RecordBuilder for Player instances.
    */
@@ -171,7 +171,7 @@
     private Builder() {
       super(SCHEMA$);
     }
-    
+
     /**
      * Creates a Builder by copying an existing Builder.
      * @param other The existing Builder to copy.
@@ -195,7 +195,7 @@
         fieldSetFlags()[3] = true;
       }
     }
-    
+
     /**
      * Creates a Builder by copying an existing Player instance
      * @param other The existing instance to copy.
@@ -239,7 +239,7 @@
       validate(fields()[0], value);
       this.number = value;
       fieldSetFlags()[0] = true;
-      return this; 
+      return this;
     }
 
     /**
@@ -279,7 +279,7 @@
       validate(fields()[1], value);
       this.first_name = value;
       fieldSetFlags()[1] = true;
-      return this; 
+      return this;
     }
 
     /**
@@ -318,7 +318,7 @@
       validate(fields()[2], value);
       this.last_name = value;
       fieldSetFlags()[2] = true;
-      return this; 
+      return this;
     }
 
     /**
@@ -357,7 +357,7 @@
       validate(fields()[3], value);
       this.position = value;
       fieldSetFlags()[3] = true;
-      return this; 
+      return this;
     }
 
     /**
@@ -395,7 +395,7 @@
   }
 
   private static final org.apache.avro.io.DatumWriter
-    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);  
+    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);
 
   @Override public void writeExternal(java.io.ObjectOutput out)
     throws java.io.IOException {
@@ -403,7 +403,7 @@
   }
 
   private static final org.apache.avro.io.DatumReader
-    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);  
+    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);
 
   @Override public void readExternal(java.io.ObjectInput in)
     throws java.io.IOException {
diff --git a/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Position.java b/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Position.java
index 2d17900..4effedd 100644
--- a/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Position.java
+++ b/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Position.java
@@ -1,12 +1,12 @@
 /**
  * Autogenerated by Avro
- * 
+ *
  * DO NOT EDIT DIRECTLY
  */
-package avro.examples.baseball;  
+package avro.examples.baseball;
 @SuppressWarnings("all")
 @org.apache.avro.specific.AvroGenerated
-public enum Position { 
+public enum Position {
   P, C, B1, B2, B3, SS, LF, CF, RF, DH  ;
   public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"enum\",\"name\":\"Position\",\"namespace\":\"avro.examples.baseball\",\"symbols\":[\"P\",\"C\",\"B1\",\"B2\",\"B3\",\"SS\",\"LF\",\"CF\",\"RF\",\"DH\"]}");
   public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
diff --git a/lang/java/tools/src/test/compiler/output/Player.java b/lang/java/tools/src/test/compiler/output/Player.java
index bf0ed62..252eaab 100644
--- a/lang/java/tools/src/test/compiler/output/Player.java
+++ b/lang/java/tools/src/test/compiler/output/Player.java
@@ -1,9 +1,9 @@
 /**
  * Autogenerated by Avro
- * 
+ *
  * DO NOT EDIT DIRECTLY
  */
-package avro.examples.baseball;  
+package avro.examples.baseball;
 
 import org.apache.avro.specific.SpecificData;
 
@@ -23,7 +23,7 @@
   /**
    * Default constructor.  Note that this does not initialize fields
    * to their default values from the schema.  If that is desired then
-   * one should use <code>newBuilder()</code>. 
+   * one should use <code>newBuilder()</code>.
    */
   public Player() {}
 
@@ -42,7 +42,7 @@
   }
 
   public org.apache.avro.Schema getSchema() { return SCHEMA$; }
-  // Used by DatumWriter.  Applications should not call. 
+  // Used by DatumWriter.  Applications should not call.
   public java.lang.Object get(int field$) {
     switch (field$) {
     case 0: return number;
@@ -52,7 +52,7 @@
     default: throw new org.apache.avro.AvroRuntimeException("Bad index");
     }
   }
-  // Used by DatumReader.  Applications should not call. 
+  // Used by DatumReader.  Applications should not call.
   @SuppressWarnings(value="unchecked")
   public void put(int field$, java.lang.Object value$) {
     switch (field$) {
@@ -136,7 +136,7 @@
   public static avro.examples.baseball.Player.Builder newBuilder() {
     return new avro.examples.baseball.Player.Builder();
   }
-  
+
   /**
    * Creates a new Player RecordBuilder by copying an existing Builder.
    * @param other The existing builder to copy.
@@ -145,7 +145,7 @@
   public static avro.examples.baseball.Player.Builder newBuilder(avro.examples.baseball.Player.Builder other) {
     return new avro.examples.baseball.Player.Builder(other);
   }
-  
+
   /**
    * Creates a new Player RecordBuilder by copying an existing Player instance.
    * @param other The existing instance to copy.
@@ -154,7 +154,7 @@
   public static avro.examples.baseball.Player.Builder newBuilder(avro.examples.baseball.Player other) {
     return new avro.examples.baseball.Player.Builder(other);
   }
-  
+
   /**
    * RecordBuilder for Player instances.
    */
@@ -171,7 +171,7 @@
     private Builder() {
       super(SCHEMA$);
     }
-    
+
     /**
      * Creates a Builder by copying an existing Builder.
      * @param other The existing Builder to copy.
@@ -195,7 +195,7 @@
         fieldSetFlags()[3] = true;
       }
     }
-    
+
     /**
      * Creates a Builder by copying an existing Player instance
      * @param other The existing instance to copy.
@@ -239,7 +239,7 @@
       validate(fields()[0], value);
       this.number = value;
       fieldSetFlags()[0] = true;
-      return this; 
+      return this;
     }
 
     /**
@@ -279,7 +279,7 @@
       validate(fields()[1], value);
       this.first_name = value;
       fieldSetFlags()[1] = true;
-      return this; 
+      return this;
     }
 
     /**
@@ -318,7 +318,7 @@
       validate(fields()[2], value);
       this.last_name = value;
       fieldSetFlags()[2] = true;
-      return this; 
+      return this;
     }
 
     /**
@@ -357,7 +357,7 @@
       validate(fields()[3], value);
       this.position = value;
       fieldSetFlags()[3] = true;
-      return this; 
+      return this;
     }
 
     /**
@@ -395,7 +395,7 @@
   }
 
   private static final org.apache.avro.io.DatumWriter
-    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);  
+    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);
 
   @Override public void writeExternal(java.io.ObjectOutput out)
     throws java.io.IOException {
@@ -403,7 +403,7 @@
   }
 
   private static final org.apache.avro.io.DatumReader
-    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);  
+    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);
 
   @Override public void readExternal(java.io.ObjectInput in)
     throws java.io.IOException {
diff --git a/lang/java/tools/src/test/compiler/output/Position.java b/lang/java/tools/src/test/compiler/output/Position.java
index 2d17900..4effedd 100644
--- a/lang/java/tools/src/test/compiler/output/Position.java
+++ b/lang/java/tools/src/test/compiler/output/Position.java
@@ -1,12 +1,12 @@
 /**
  * Autogenerated by Avro
- * 
+ *
  * DO NOT EDIT DIRECTLY
  */
-package avro.examples.baseball;  
+package avro.examples.baseball;
 @SuppressWarnings("all")
 @org.apache.avro.specific.AvroGenerated
-public enum Position { 
+public enum Position {
   P, C, B1, B2, B3, SS, LF, CF, RF, DH  ;
   public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"enum\",\"name\":\"Position\",\"namespace\":\"avro.examples.baseball\",\"symbols\":[\"P\",\"C\",\"B1\",\"B2\",\"B3\",\"SS\",\"LF\",\"CF\",\"RF\",\"DH\"]}");
   public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestCatTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestCatTool.java
index d0c6d65..312bd76 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestCatTool.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestCatTool.java
@@ -52,7 +52,7 @@
   private static final int LIMIT_OUT_OF_INPUT_BOUNDS = 100001;
   private static final double SAMPLERATE = .01;
   private static final double SAMPLERATE_TOO_SMALL = .00000001;
-  
+
   private final Schema INTSCHEMA = new Schema.Parser().parse(
     "{\"type\":\"record\", " +
     "\"name\":\"myRecord\", " +
@@ -66,7 +66,7 @@
     "]}");
   private static final CodecFactory DEFLATE = CodecFactory.deflateCodec(9);
   private static final CodecFactory SNAPPY = CodecFactory.snappyCodec();
-  
+
 
   private GenericRecord aDatum(Type ofType, int forRow) {
     GenericRecord record = null;
@@ -78,7 +78,7 @@
       case INT:
         record = new GenericData.Record(INTSCHEMA);
         record.put("value", forRow);
-        return record;      
+        return record;
       default:
        throw new AssertionError("I can't generate data for this type");
     }
@@ -95,7 +95,7 @@
     if(type.equals(Schema.Type.STRING)) {
       schema = STRINGSCHEMA;
     }
-       
+
     DataFileWriter<Object> writer = new DataFileWriter<Object>(
               new GenericDatumWriter<Object>(schema));
     for(Entry<String, String> metadatum : metadata.entrySet()) {
@@ -111,10 +111,10 @@
 
     return inputFile;
   }
- 
-  
+
+
   private int getFirstIntDatum(File file) throws Exception {
-    DataFileStream<GenericRecord> reader = new DataFileStream<GenericRecord>( new FileInputStream(file) , 
+    DataFileStream<GenericRecord> reader = new DataFileStream<GenericRecord>( new FileInputStream(file) ,
       new GenericDatumReader<GenericRecord>());
 
     int result = (Integer) reader.next().get(0);
@@ -166,7 +166,7 @@
     assertEquals(0, returnCode);
 
     assertEquals(LIMIT_WITHIN_INPUT_BOUNDS, numRowsInFile(output));
-    
+
 //    folder input
     args = asList(
       input1.getParentFile().getAbsolutePath(),
@@ -182,7 +182,7 @@
     assertEquals(LIMIT_WITHIN_INPUT_BOUNDS, numRowsInFile(output));
   }
 
-  
+
   @Test
   public void testLimitOutOfBounds() throws Exception {
     Map<String, String> metadata = new HashMap<String, String>();
@@ -203,9 +203,9 @@
       System.err,
       args);
     assertEquals(0, returnCode);
-    assertEquals(ROWS_IN_INPUT_FILES - OFFSET, numRowsInFile(output)); 
+    assertEquals(ROWS_IN_INPUT_FILES - OFFSET, numRowsInFile(output));
   }
-  
+
   @Test
   public void testSamplerateAccuracy() throws Exception {
     Map<String, String> metadata = new HashMap<String, String>();
@@ -226,9 +226,9 @@
       System.err,
       args);
     assertEquals(0, returnCode);
-    
+
     assertTrue("Outputsize is not roughly (Inputsize - Offset) * samplerate",
-      (ROWS_IN_INPUT_FILES - OFFSET)*SAMPLERATE - numRowsInFile(output) < 2);    
+      (ROWS_IN_INPUT_FILES - OFFSET)*SAMPLERATE - numRowsInFile(output) < 2);
     assertTrue("", (ROWS_IN_INPUT_FILES - OFFSET)*SAMPLERATE - numRowsInFile(output) > -2);
   }
 
@@ -256,7 +256,7 @@
     assertEquals("output does not start at offset",
       OFFSET, getFirstIntDatum(output));
   }
-  
+
   @Test
   public void testOffsetBiggerThanInput() throws Exception{
     Map<String, String> metadata = new HashMap<String, String>();
@@ -279,7 +279,7 @@
     assertEquals("output is not empty",
       0, numRowsInFile(output));
   }
-  
+
   @Test
   public void testSamplerateSmallerThanInput() throws Exception{
     Map<String, String> metadata = new HashMap<String, String>();
@@ -300,12 +300,12 @@
       System.err,
       args);
     assertEquals(0, returnCode);
-    
+
     assertEquals("output should only contain the record at offset",
       (int) OFFSET, getFirstIntDatum(output));
   }
-  
-  
+
+
   @Test(expected = IOException.class)
   public void testDifferentSchemasFail() throws Exception {
     Map<String, String> metadata = new HashMap<String, String>();
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestCreateRandomFileTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestCreateRandomFileTool.java
index f5c6056..e752961 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestCreateRandomFileTool.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestCreateRandomFileTool.java
@@ -56,7 +56,7 @@
     }
     return baos.toByteArray();
   }
-  
+
   public void check(String... extraArgs) throws Exception {
     ArrayList<String> args = new ArrayList<String>();
     args.addAll(Arrays.asList(new String[] {
@@ -69,7 +69,7 @@
 
     DataFileReader<Object> reader =
       new DataFileReader(OUT_FILE, new GenericDatumReader<Object>());
-    
+
     Iterator<Object> found = reader.iterator();
     for (Object expected :
            new RandomData(Schema.parse(SCHEMA_FILE), Integer.parseInt(COUNT)))
@@ -94,11 +94,11 @@
     byte[] file =
       run(Arrays.asList(new String[]
         { "-", "--count", COUNT, "--schema-file", SCHEMA_FILE.toString() }));
-    
+
     DataFileStream<Object> reader =
       new DataFileStream(new ByteArrayInputStream(file),
                          new GenericDatumReader<Object>());
-    
+
     Iterator<Object> found = reader.iterator();
     for (Object expected :
            new RandomData(Schema.parse(SCHEMA_FILE), Integer.parseInt(COUNT)))
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileTools.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileTools.java
index 9661e4e..0270b71 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileTools.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileTools.java
@@ -52,7 +52,7 @@
   static String jsonData;
   static Schema schema;
   static File schemaFile;
-  
+
   private static final String KEY_NEEDING_ESCAPES = "trn\\\r\t\n";
   private static final String ESCAPED_KEY = "trn\\\\\\r\\t\\n";
 
@@ -65,7 +65,7 @@
     FileWriter fw = new FileWriter(schemaFile);
     fw.append(schema.toString());
     fw.close();
-    
+
     DataFileWriter<Object> writer
       = new DataFileWriter<Object>(new GenericDatumWriter<Object>(schema))
       .setMeta(KEY_NEEDING_ESCAPES, "")
@@ -80,10 +80,10 @@
 
     writer.flush();
     writer.close();
-    
+
     jsonData = builder.toString();
   }
-  
+
   private String run(Tool tool, String... args) throws Exception {
     return run(tool, null, args);
   }
@@ -110,49 +110,49 @@
     FileInputStream stdin = new FileInputStream(sampleFile);
     assertEquals(jsonData, run(new DataFileReadTool(), stdin, "-"));
   }
-  
+
   @Test
   public void testReadToJsonPretty() throws Exception {
     assertEquals(jsonData,
         run(new DataFileReadTool(), "--pretty", sampleFile.getPath()));
   }
-  
+
   @Test
   public void testGetMeta() throws Exception {
     String output = run(new DataFileGetMetaTool(), sampleFile.getPath());
     assertTrue(output, output.contains("avro.schema\t"+schema.toString()+"\n"));
     assertTrue(output, output.contains(ESCAPED_KEY+"\t\n"));
   }
-  
+
   @Test
   public void testGetMetaForSingleKey() throws Exception {
     assertEquals(schema.toString() + "\n",
         run(new DataFileGetMetaTool(), sampleFile.getPath(), "--key",
             "avro.schema"));
   }
-  
+
   @Test
   public void testGetSchema() throws Exception {
     assertEquals(schema.toString() + "\n",
         run(new DataFileGetSchemaTool(), sampleFile.getPath()));
   }
-  
+
   @Test
   public void testWriteWithDeflate() throws Exception {
     testWrite("deflate", Arrays.asList("--codec", "deflate"), "deflate");
   }
-  
+
   @Test
   public void testWrite() throws Exception {
     testWrite("plain", Collections.<String>emptyList(), "null");
   }
-  
-  public void testWrite(String name, List<String> extra, String expectedCodec) 
+
+  public void testWrite(String name, List<String> extra, String expectedCodec)
       throws Exception {
       testWrite(name, extra, expectedCodec, "-schema", schema.toString());
       testWrite(name, extra, expectedCodec, "-schema-file", schemaFile.toString());
   }
-  public void testWrite(String name, List<String> extra, String expectedCodec, String... extraArgs) 
+  public void testWrite(String name, List<String> extra, String expectedCodec, String... extraArgs)
   throws Exception {
     File outFile = AvroTestUtil.tempFile(getClass(),
         TestDataFileTools.class + ".testWrite." + name + ".avro");
@@ -171,7 +171,7 @@
         args);
     out.close();
     fout.close();
-    
+
     // Read it back, and make sure it's valid.
     GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
     DataFileReader<Object> fileReader = new DataFileReader<Object>(outFile,reader);
@@ -188,7 +188,7 @@
     }
     assertEquals(expectedCodec, codecStr);
   }
-  
+
   @Test
   public void testFailureOnWritingPartialJSONValues() throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -197,7 +197,7 @@
       new DataFileWriteTool().run(
           new StringBufferInputStream("{"),
           new PrintStream(out), // stdout
-          null, // stderr          
+          null, // stderr
           Arrays.asList("-schema", "{ \"type\":\"record\", \"fields\":" +
                         "[{\"name\":\"foo\", \"type\":\"string\"}], " +
                         "\"name\":\"boring\" }", "-"));
@@ -206,7 +206,7 @@
       // expected
     }
   }
-  
+
   @Test
   public void testWritingZeroJsonValues() throws Exception {
     File outFile = writeToAvroFile("zerojsonvalues",
@@ -214,10 +214,10 @@
         "");
     assertEquals(0, countRecords(outFile));
   }
-  
+
   private int countRecords(File outFile) throws IOException {
     GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
-    DataFileReader<Object> fileReader = 
+    DataFileReader<Object> fileReader =
       new DataFileReader<Object>(outFile,reader);
     int i = 0;
     for (@SuppressWarnings("unused") Object datum : fileReader) {
@@ -229,12 +229,12 @@
   @Test
   public void testDifferentSeparatorsBetweenJsonRecords() throws Exception {
     File outFile = writeToAvroFile(
-        "seperators", 
-        "{ \"type\":\"array\", \"items\":\"int\" }", 
+        "seperators",
+        "{ \"type\":\"array\", \"items\":\"int\" }",
         "[]    [] []\n[][3]     ");
     assertEquals(5, countRecords(outFile));
   }
-  
+
   public File writeToAvroFile(String testName, String schema, String json) throws Exception {
     File outFile = AvroTestUtil.tempFile(getClass(),
         TestDataFileTools.class + "." + testName + ".avro");
@@ -249,5 +249,5 @@
     fout.close();
     return outFile;
   }
-  
+
 }
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestJsonToFromBinaryFragmentTools.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestJsonToFromBinaryFragmentTools.java
index 557cac9..c5e3c97 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestJsonToFromBinaryFragmentTools.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestJsonToFromBinaryFragmentTools.java
@@ -37,22 +37,22 @@
 import org.junit.Test;
 
 /**
- * Tests both {@link JsonToBinaryFragmentTool} 
+ * Tests both {@link JsonToBinaryFragmentTool}
  * and {@link BinaryFragmentToJsonTool}.
  */
 public class TestJsonToFromBinaryFragmentTools {
   private static final String STRING_SCHEMA = Schema.create(Type.STRING).toString();
   private static final String UTF8 = "utf-8";
-  private static final String AVRO = 
+  private static final String AVRO =
     "ZLong string implies readable length encoding.";
-  private static final String JSON = 
+  private static final String JSON =
     "\"Long string implies readable length encoding.\"\n";
 
   @Test
   public void testBinaryToJson() throws Exception {
     binaryToJson(AVRO, JSON, STRING_SCHEMA);
   }
-  
+
   @Test
     public void testJsonToBinary() throws Exception {
     jsonToBinary(JSON, AVRO, STRING_SCHEMA);
@@ -82,7 +82,7 @@
   public void testBinaryToJsonSchemaFile() throws Exception {
     binaryToJson(AVRO, JSON, "--schema-file", schemaFile());
   }
-  
+
   @Test
     public void testJsonToBinarySchemaFile() throws Exception {
     jsonToBinary(JSON, AVRO, "--schema-file", schemaFile());
@@ -91,7 +91,7 @@
   private void binaryToJson(String avro, String json, String... options) throws Exception {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     PrintStream p = new PrintStream(new BufferedOutputStream(baos));
-    
+
     List<String> args = new ArrayList<String>();
     args.addAll(Arrays.asList(options));
     args.add("-");
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestRpcReceiveAndSendTools.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestRpcReceiveAndSendTools.java
index 97e527d..7e38198 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestRpcReceiveAndSendTools.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestRpcReceiveAndSendTools.java
@@ -27,7 +27,7 @@
 import org.junit.Test;
 
 public class TestRpcReceiveAndSendTools {
-  
+
   /**
    * Starts a server (using the tool) and sends a single message to it.
    */
@@ -38,7 +38,7 @@
     ByteArrayOutputStream baos1 = new ByteArrayOutputStream();
     PrintStream p1 = new PrintStream(baos1);
     RpcReceiveTool receive = new RpcReceiveTool();
-    receive.run1(null, p1, System.err, 
+    receive.run1(null, p1, System.err,
                  Arrays.asList("http://0.0.0.0:0/",
                                protocolFile, "hello",
                                "-data", "\"Hello!\""));
@@ -47,10 +47,10 @@
     RpcSendTool send = new RpcSendTool();
     send.run(null, p2, System.err,
              Arrays.asList("http://127.0.0.1:"+receive.server.getPort()+"/",
-                           protocolFile, "hello",  
+                           protocolFile, "hello",
                            "-data", "{ \"greeting\": \"Hi!\" }"));
     receive.run2(System.err);
-    
+
     assertTrue(baos1.toString("UTF-8").replace("\r", "")
                .endsWith("hello\t{\"greeting\":\"Hi!\"}\n"));
     assertEquals("\"Hello!\"\n", baos2.toString("UTF-8").replace("\r", ""));
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java
index eee4027..0e3c5e8 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java
@@ -57,7 +57,7 @@
   static ByteBuffer[] lines;
   static Schema schema;
   static File schemaFile;
-  
+
   @BeforeClass
   public static void writeRandomFile() throws IOException {
     schema = Schema.create(Type.BYTES);
@@ -82,7 +82,7 @@
     }
     out.close();
   }
-  
+
   private void fromText(String name, String... args) throws Exception {
     File avroFile = AvroTestUtil.tempFile(getClass(), name + ".avro");
 
@@ -92,7 +92,7 @@
     arglist.add(avroFile.toString());
 
     new FromTextTool().run(null, null, null, arglist);
-    
+
     // Read it back, and make sure it's valid.
     DataFileReader<ByteBuffer> file = new DataFileReader<ByteBuffer>
       (avroFile, new GenericDatumReader<ByteBuffer>());
@@ -104,7 +104,7 @@
     }
     assertEquals(COUNT, i);
   }
-  
+
   @Test
   public void testFromText() throws Exception {
     fromText("null", "--codec", "null");
@@ -128,7 +128,7 @@
     arglist.add(outFile.toString());
 
     new ToTextTool().run(null, null, null, arglist);
-    
+
     // Read it back, and make sure it's valid.
     InputStream orig = new BufferedInputStream(new FileInputStream(linesFile));
     InputStream after = new BufferedInputStream(new FileInputStream(outFile));
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestToTrevniTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestToTrevniTool.java
index 31f1ab6..164acaf 100644
--- a/lang/java/tools/src/test/java/org/apache/avro/tool/TestToTrevniTool.java
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestToTrevniTool.java
@@ -50,7 +50,7 @@
     new ToTrevniTool().run(null, p, null, Arrays.asList(args));
     return baos.toString("UTF-8").replace("\r", "");
   }
-  
+
   @Test
   public void test() throws Exception {
     Schema schema = Schema.parse(SCHEMA_FILE);
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnReader.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnReader.java
index f7514db..97ee485 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnReader.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnReader.java
@@ -50,7 +50,7 @@
   private GenericData model;
   private Schema fileSchema;
   private Schema readSchema;
-  
+
   private ColumnValues[] values;
   private int[] arrayWidths;
   private int column;                          // current index in values
@@ -125,16 +125,16 @@
     switch (read.getType()) {
     case NULL: case BOOLEAN:
     case INT: case LONG:
-    case FLOAT: case DOUBLE: 
-    case BYTES: case STRING: 
+    case FLOAT: case DOUBLE:
+    case BYTES: case STRING:
     case ENUM: case FIXED:
       if (read.getType() != write.getType())
         throw new TrevniRuntimeException("Type mismatch: "+read+" & "+write);
       break;
-    case MAP: 
+    case MAP:
       findDefaults(read.getValueType(), write.getValueType());
       break;
-    case ARRAY: 
+    case ARRAY:
       findDefaults(read.getElementType(), write.getElementType());
       break;
     case UNION:
@@ -145,7 +145,7 @@
         findDefaults(s, write.getTypes().get(index));
       }
       break;
-    case RECORD: 
+    case RECORD:
       for (Field f : read.getFields()) {
         Field g = write.getField(f.name());
         if (g == null)
@@ -200,7 +200,7 @@
     final int startColumn = column;
 
     switch (s.getType()) {
-    case MAP: 
+    case MAP:
       int size = values[column].nextLength();
       Map map = (Map)new HashMap(size);
       for (int i = 0; i < size; i++) {
@@ -211,7 +211,7 @@
       }
       column = startColumn + arrayWidths[startColumn];
       return map;
-    case RECORD: 
+    case RECORD:
       Object record = model.newRecord(null, s);
       Map<String,Object> rDefaults = defaults.get(s.getFullName());
       for (Field f : s.getFields()) {
@@ -221,7 +221,7 @@
         model.setField(record, f.name(), f.pos(), value);
       }
       return record;
-    case ARRAY: 
+    case ARRAY:
       int length = values[column].nextLength();
       List elements = (List)new GenericData.Array(length, s);
       for (int i = 0; i < length; i++) {
@@ -254,7 +254,7 @@
 
   private Object nextValue(Schema s, int column) throws IOException {
     Object v = values[column].nextValue();
-    
+
     switch (s.getType()) {
     case ENUM:
       return model.createEnum(s.getEnumSymbols().get((Integer)v), s);
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnWriter.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnWriter.java
index abb5682..caf7fd5 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnWriter.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnWriter.java
@@ -89,14 +89,14 @@
     assert(count == writer.getColumnCount());
     writer.endRow();
   }
-  
+
   private int write(Object o, Schema s, int column) throws IOException {
     if (isSimple(s)) {
       writeValue(o, s, column);
       return column+1;
     }
     switch (s.getType()) {
-    case MAP: 
+    case MAP:
       Map<?,?> map = (Map)o;
       writer.writeLength(map.size(), column);
       for (Map.Entry e : map.entrySet()) {
@@ -106,11 +106,11 @@
         assert(c == column+arrayWidths[column]);
       }
       return column+arrayWidths[column];
-    case RECORD: 
+    case RECORD:
       for (Field f : s.getFields())
         column = write(model.getField(o,f.name(),f.pos()), f.schema(), column);
       return column;
-    case ARRAY: 
+    case ARRAY:
       Collection elements = (Collection)o;
       writer.writeLength(elements.size(), column);
       if (isSimple(s.getElementType())) {         // optimize simple arrays
@@ -151,7 +151,7 @@
 
   private void writeValue(Object value, Schema s, int column)
     throws IOException {
-    
+
     switch (s.getType()) {
     case STRING:
       if (value instanceof Utf8)                    // convert Utf8 to String
@@ -160,7 +160,7 @@
     case ENUM:
       if (value instanceof Enum)
         value = ((Enum)value).ordinal();
-      else 
+      else
         value = s.getEnumOrdinal(value.toString());
       break;
     case FIXED:
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnator.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnator.java
index 2f9a3ef..a546c14 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnator.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnator.java
@@ -71,9 +71,9 @@
     if (seen.containsKey(s))                      // catch recursion
       throw new TrevniRuntimeException("Cannot shred recursive schemas: "+s);
     seen.put(s, s);
-    
+
     switch (s.getType()) {
-    case MAP: 
+    case MAP:
       path = path == null ? ">" : path+">";
       int start = columns.size();
       ColumnMetaData p = addColumn(path, ValueType.NULL, parent, true);
@@ -85,7 +85,7 @@
       for (Field field : s.getFields())           // flatten fields to columns
         columnize(p(path, field.name(), "#"), field.schema(), parent, isArray);
       break;
-    case ARRAY: 
+    case ARRAY:
       path = path == null ? "[]" : path+"[]";
       addArrayColumn(path, s.getElementType(), parent);
       break;
@@ -131,7 +131,7 @@
     // complex array: insert a parent column with lengths
     int start = columns.size();
     ColumnMetaData array = addColumn(path, ValueType.NULL, parent, true);
-    columnize(path, element, array, false); 
+    columnize(path, element, array, false);
     arrayWidths.set(start, columns.size()-start); // fixup with actual width
   }
 
@@ -139,8 +139,8 @@
     switch (s.getType()) {
     case NULL: case BOOLEAN:
     case INT: case LONG:
-    case FLOAT: case DOUBLE: 
-    case BYTES: case STRING: 
+    case FLOAT: case DOUBLE:
+    case BYTES: case STRING:
     case ENUM: case FIXED:
       return true;
     default:
@@ -165,4 +165,4 @@
     }
   }
 
-}    
+}
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniInputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniInputFormat.java
index 47bec01..003c266 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniInputFormat.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniInputFormat.java
@@ -79,9 +79,9 @@
       private long row;
 
       public AvroWrapper<T> createKey() { return new AvroWrapper<T>(null); }
-  
+
       public NullWritable createValue() { return NullWritable.get(); }
-    
+
       public boolean next(AvroWrapper<T> wrapper, NullWritable ignore)
         throws IOException {
         if (!reader.hasNext())
@@ -90,13 +90,13 @@
         row++;
         return true;
       }
-  
+
       public float getProgress() throws IOException { return row / rows; }
-  
+
       public long getPos() throws IOException { return row; }
 
       public void close() throws IOException { reader.close(); }
-  
+
     };
 
   }
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java
index 60b432b..ca71107 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java
@@ -52,7 +52,7 @@
 
   /** The file name extension for trevni files. */
   public final static String EXT = ".trv";
-  
+
   public static final String META_PREFIX = "trevni.meta.";
 
   /** Add metadata to job output files.*/
@@ -84,7 +84,7 @@
 
       private AvroColumnWriter<T> writer =
         new AvroColumnWriter<T>(schema, meta, ReflectData.get());
-    
+
       private void flush() throws IOException {
         OutputStream out = fs.create(new Path(dir, "part-"+(part++)+EXT));
         try {
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyInputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyInputFormat.java
index 89287e6..30d1682 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyInputFormat.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyInputFormat.java
@@ -29,28 +29,28 @@
 
 /**
  * An {@link org.apache.hadoop.mapreduce.InputFormat} for Trevni files.
- * 
+ *
  * This implement was modeled off
  * {@link org.apache.avro.mapreduce.AvroKeyInputFormat} to allow for easy
  * transition
- * 
+ *
  * A MapReduce InputFormat that can handle Trevni container files.
  *
  * <p>Keys are AvroKey wrapper objects that contain the Trevni data.  Since Trevni
  * container files store only records (not key/value pairs), the value from
  * this InputFormat is a NullWritable.</p>
- * 
+ *
  * <p>
  * A subset schema to be read may be specified with
  * {@link org.apache.avro.mapreduce.AvroJob#setInputKeySchema}.
  */
 public class AvroTrevniKeyInputFormat<T> extends FileInputFormat<AvroKey<T>, NullWritable> {
-  
+
   @Override
   public RecordReader<AvroKey<T>, NullWritable> createRecordReader(
       InputSplit split, TaskAttemptContext context) throws IOException,
       InterruptedException {
-    
+
     return new AvroTrevniKeyRecordReader<T>();
   }
 
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyOutputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyOutputFormat.java
index 34354f7..cc1f48f 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyOutputFormat.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyOutputFormat.java
@@ -29,17 +29,17 @@
 /** An {@link org.apache.hadoop.mapreduce.OutputFormat} that writes Avro data to
  * Trevni files.
  *
- * This implement was modeled off 
+ * This implement was modeled off
  * {@link org.apache.avro.mapreduce.AvroKeyOutputFormat} to allow for easy
  * transition
- * 
+ *
  * FileOutputFormat for writing Trevni container files.
  *
  * <p>Since Trevni container files only contain records (not key/value pairs), this output
  * format ignores the value.</p>
  *
  * @param <T> The (java) type of the Trevni data to write.
- * 
+ *
  * <p>Writes a directory of files per task, each comprising a single filesystem
  * block.  To reduce the number of files, increase the default filesystem block
  * size for the job.  Each task also requires enough memory to buffer a
@@ -50,7 +50,7 @@
   @Override
   public RecordWriter<AvroKey<T>, NullWritable> getRecordWriter(TaskAttemptContext context)
       throws IOException, InterruptedException {
-    
+
     return new AvroTrevniKeyRecordWriter<T>(context );
   }
 }
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyRecordReader.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyRecordReader.java
index 88f2410..4751983 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyRecordReader.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyRecordReader.java
@@ -29,10 +29,10 @@
  * @param <T> The (java) type of data in Trevni container file.
  */
 public class AvroTrevniKeyRecordReader<T> extends AvroTrevniRecordReaderBase<AvroKey<T>, NullWritable, T> {
-  
+
   /** A reusable object to hold records of the Avro container file. */
   private final AvroKey<T> mCurrentKey = new AvroKey<T>();
-  
+
   /** {@inheritDoc} */
   @Override
   public AvroKey<T> getCurrentKey() throws IOException,
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueInputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueInputFormat.java
index c16e381..2fb1b33 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueInputFormat.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueInputFormat.java
@@ -28,11 +28,11 @@
 
 /**
  * An {@link org.apache.hadoop.mapreduce.InputFormat} for Trevni files.
- * 
+ *
  * This implement was modeled off
  * {@link org.apache.avro.mapreduce.AvroKeyValueInputFormat} to allow for easy
  * transition
- * 
+ *
  * <p>
  * A MapReduce InputFormat that reads from Trevni container files of key/value generic records.
  *
@@ -43,7 +43,7 @@
  *
  * @param <K> The type of the Trevni key to read.
  * @param <V> The type of the Trevni value to read.
- * 
+ *
  * <p>
  * A subset schema to be read may be specified with
  * {@link org.apache.avro.mapreduce.AvroJob#setInputKeySchema} and
@@ -56,7 +56,7 @@
   public RecordReader<AvroKey<K>, AvroValue<V>> createRecordReader(
       InputSplit split, TaskAttemptContext context) throws IOException,
       InterruptedException {
-    
+
     return new AvroTrevniKeyValueRecordReader<K, V>();
   }
 
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueOutputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueOutputFormat.java
index c508df3..9f6b23f 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueOutputFormat.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueOutputFormat.java
@@ -28,11 +28,11 @@
 
 /** An {@link org.apache.hadoop.mapreduce.OutputFormat} that writes Avro data to
  * Trevni files.
- * 
- * This implement was modeled off 
+ *
+ * This implement was modeled off
  * {@link org.apache.avro.mapreduce.AvroKeyValueOutputFormat} to allow for easy
- * transition 
- * 
+ * transition
+ *
  *  * FileOutputFormat for writing Trevni container files of key/value pairs.
  *
  * <p>Since Trevni container files can only contain records (not key/value pairs), this
@@ -46,19 +46,19 @@
  *
  * @param <K> The type of key. If an Avro type, it must be wrapped in an <code>AvroKey</code>.
  * @param <V> The type of value. If an Avro type, it must be wrapped in an <code>AvroValue</code>.
- * 
+ *
  * <p>Writes a directory of files per task, each comprising a single filesystem
  * block.  To reduce the number of files, increase the default filesystem block
  * size for the job.  Each task also requires enough memory to buffer a
  * filesystem block.
  */
-public class AvroTrevniKeyValueOutputFormat <K, V> extends FileOutputFormat<AvroKey<K>, AvroValue<V>> { 
-  
+public class AvroTrevniKeyValueOutputFormat <K, V> extends FileOutputFormat<AvroKey<K>, AvroValue<V>> {
+
   /** {@inheritDoc} */
   @Override
   public RecordWriter<AvroKey<K>, AvroValue<V>> getRecordWriter(TaskAttemptContext context)
       throws IOException, InterruptedException {
-    
+
     return new AvroTrevniKeyValueRecordWriter<K, V>(context );
   }
 }
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordReader.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordReader.java
index 3aa956f..31fe3f4 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordReader.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordReader.java
@@ -41,7 +41,7 @@
   private final AvroKey<K> mCurrentKey = new AvroKey<K>();
   /** The current value the reader is on. */
   private final AvroValue<V> mCurrentValue = new AvroValue<V>();
-  
+
   /** {@inheritDoc} */
   @Override
   public AvroKey<K> getCurrentKey() throws IOException,
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordWriter.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordWriter.java
index 136ef06..9038028 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordWriter.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordWriter.java
@@ -46,16 +46,16 @@
 
   /** The writer schema for the generic record entries of the Trevni container file. */
   Schema mKeyValuePairSchema;
-  
+
   /** A reusable Avro generic record for writing key/value pairs to the file. */
   AvroKeyValue<Object, Object> keyValueRecord;
-  
+
   /** A helper object that converts the input key to an Avro datum. */
   AvroDatumConverter<K, ?> keyConverter;
-  
+
   /** A helper object that converts the input value to an Avro datum. */
   AvroDatumConverter<V, ?> valueConverter;
-    
+
   /**
    * Constructor.
    * @param context The TaskAttempContext to supply the writer with information form the job configuration
@@ -63,30 +63,30 @@
   public AvroTrevniKeyValueRecordWriter(TaskAttemptContext context)
       throws IOException {
     super(context);
-    
+
     mKeyValuePairSchema = initSchema(context);
     keyValueRecord  = new AvroKeyValue<Object, Object>(new GenericData.Record(mKeyValuePairSchema));
   }
-  
+
   /** {@inheritDoc} */
   @Override
   public void write(AvroKey<K> key, AvroValue<V> value) throws IOException,
       InterruptedException {
-    
+
     keyValueRecord.setKey(key.datum());
     keyValueRecord.setValue(value.datum());
     writer.write(keyValueRecord.get());
     if (writer.sizeEstimate() >= blockSize) // block full
       flush();
   }
-  
+
   /** {@inheritDoc} */
   @SuppressWarnings("unchecked")
   @Override
   protected Schema initSchema(TaskAttemptContext context) {
     AvroDatumConverterFactory converterFactory = new AvroDatumConverterFactory(
         context.getConfiguration());
-    
+
     keyConverter = converterFactory.create((Class<K>) context
         .getOutputKeyClass());
     valueConverter = converterFactory.create((Class<V>) context
@@ -95,7 +95,7 @@
     // Create the generic record schema for the key/value pair.
     return AvroKeyValue.getSchema(
         keyConverter.getWriterSchema(), valueConverter.getWriterSchema());
-    
+
   }
-  
+
 }
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordReaderBase.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordReaderBase.java
index b68669f..1a3114f 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordReaderBase.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordReaderBase.java
@@ -37,16 +37,16 @@
  * @param <T> The type of the entries within the Trevni container file being read.
  */
 public abstract class AvroTrevniRecordReaderBase<K, V, T> extends RecordReader<K, V> {
-  
+
   /** The Trevni file reader */
   private AvroColumnReader<T> reader;
-  
+
   /** Number of rows in the Trevni file */
   private float rows;
-  
+
   /** The current row number being read in */
   private long row;
-  
+
   /** A reusable object to hold records of the Avro container file. */
   private T mCurrentRecord;
 
@@ -60,11 +60,11 @@
     final AvroColumnReader.Params params =
       new AvroColumnReader.Params(new HadoopInput(file.getPath(), context.getConfiguration()));
     params.setModel(ReflectData.get());
-    
+
     if (AvroJob.getInputKeySchema(context.getConfiguration()) != null) {
       params.setSchema(AvroJob.getInputKeySchema(context.getConfiguration()));
     }
-    
+
     reader = new AvroColumnReader<T>(params);
     rows = reader.getRowCount();
   }
@@ -78,7 +78,7 @@
     row++;
     return true;
   }
-  
+
   /**
    * Gets the current record read from the Trevni container file.
    *
@@ -93,7 +93,7 @@
   /** {@inheritDoc} */
   @Override
   public void close() throws IOException {
-    reader.close(); 
+    reader.close();
   }
 
   /** {@inheritDoc} */
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java
index 94a332d..7f1d57b 100644
--- a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java
@@ -43,15 +43,15 @@
  * @param <T> The type of the entries within the Trevni container file being writen.
  */
 public abstract class AvroTrevniRecordWriterBase<K,V, T> extends RecordWriter<K, V> {
-  
+
   /** trevni file extension */
   public final static String EXT = ".trv";
-  
+
   /** prefix of job configs that we care about */
   public static final String META_PREFIX = "trevni.meta.";
-  
-  /** Counter that increments as new trevni files are create because the current file 
-   * has exceeded the block size 
+
+  /** Counter that increments as new trevni files are create because the current file
+   * has exceeded the block size
    * */
   protected int part = 0;
 
@@ -60,31 +60,31 @@
 
   /** This will be a unique directory linked to the task */
   final Path dirPath;
-  
+
   /** HDFS object */
   final FileSystem fs;
 
   /** Current configured blocksize */
   final long blockSize;
-  
+
   /** Provided avro schema from the context */
   protected Schema schema;
-  
+
   /** meta data to be stored in the output file.  */
   protected ColumnFileMetaData meta;
-  
+
   /**
    * Constructor.
    * @param context The TaskAttempContext to supply the writer with information form the job configuration
    */
   public AvroTrevniRecordWriterBase(TaskAttemptContext context) throws IOException {
-    
+
     schema = initSchema(context);
     meta = filterMetadata(context.getConfiguration());
     writer = new AvroColumnWriter<T>(schema, meta, ReflectData.get());
 
     Path outputPath = FileOutputFormat.getOutputPath(context);
-    
+
     String dir = FileOutputFormat.getUniqueFile(context, "part", "");
     dirPath = new Path(outputPath.toString() + "/" + dir);
     fs = dirPath.getFileSystem(context.getConfiguration());
@@ -97,8 +97,8 @@
    * Use the task context to construct a schema for writing
    * @throws IOException
    */
-  abstract protected  Schema initSchema(TaskAttemptContext context); 
-  
+  abstract protected  Schema initSchema(TaskAttemptContext context);
+
   /**
    * A Trevni flush will close the current file and prep a new writer
    * @throws IOException
@@ -112,14 +112,14 @@
     }
     writer = new AvroColumnWriter<T>(schema, meta, ReflectData.get());
   }
-  
+
   /** {@inheritDoc} */
   @Override
   public void close(TaskAttemptContext arg0) throws IOException,
       InterruptedException {
     flush();
   }
-  
+
   static ColumnFileMetaData filterMetadata(final Configuration configuration) {
     final ColumnFileMetaData meta = new ColumnFileMetaData();
     Iterator<Entry<String, String>> keyIterator = configuration.iterator();
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/RandomData.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/RandomData.java
index 8f5976a..5d43aae 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/RandomData.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/RandomData.java
@@ -45,7 +45,7 @@
     this.root = schema;
     this.count = count;
   }
-  
+
   public Iterator<Object> iterator() {
     return new Iterator<Object>() {
       private int n;
@@ -58,14 +58,14 @@
       public void remove() { throw new UnsupportedOperationException(); }
     };
   }
-  
+
   @SuppressWarnings(value="unchecked")
   private static Object generate(Schema schema, Random random, int d) {
     switch (schema.getType()) {
     case RECORD:
       GenericRecord record = new GenericData.Record(schema);
       for (Schema.Field field : schema.getFields()) {
-        Object value = (field.getJsonProp(USE_DEFAULT) == null) 
+        Object value = (field.getJsonProp(USE_DEFAULT) == null)
           ? generate(field.schema(), random, d+1)
           : GenericData.get().getDefaultValue(field);
         record.put(field.name(), value);
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestMetadataFiltering.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestMetadataFiltering.java
index 920eb89..e3b9507 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestMetadataFiltering.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestMetadataFiltering.java
@@ -27,15 +27,15 @@
 
   @Test public void testMetadataFiltering() throws Exception {
     JobConf job = new JobConf();
-    
+
     job.set(AvroTrevniOutputFormat.META_PREFIX + "test1", "1");
     job.set(AvroTrevniOutputFormat.META_PREFIX + "test2", "2");
     job.set("test3", "3");
     job.set(AvroJob.TEXT_PREFIX + "test4", "4");
     job.set(AvroTrevniOutputFormat.META_PREFIX + "test5", "5");
-    
+
     ColumnFileMetaData metadata = AvroTrevniOutputFormat.filterMetadata(job);
-    
+
     assertTrue(metadata.get("test1") != null);
     assertTrue(new String(metadata.get("test1")).equals("1"));
     assertTrue(metadata.get("test2") != null);
@@ -44,7 +44,7 @@
     assertTrue(new String(metadata.get("test5")).equals("5"));
     assertTrue(metadata.get("test3") == null);
     assertTrue(metadata.get("test4") == null);
-    
+
   }
-  
+
 }
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestShredder.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestShredder.java
index 06fdd09..9ca7344 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestShredder.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestShredder.java
@@ -78,7 +78,7 @@
   }
 
   @Test public void testDefaultValue() throws Exception {
-    String s = 
+    String s =
       "{\"type\":\"record\",\"name\":\"R\",\"fields\":["
       +SIMPLE_FIELDS+","
       +"{\"name\":\"z\",\"type\":\"int\","
@@ -89,7 +89,7 @@
   }
 
   @Test public void testNestedRecord() throws Exception {
-    String s = 
+    String s =
       "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
       +"{\"name\":\"x\",\"type\":\"int\"},"
       +"{\"name\":\"R\",\"type\":"+SIMPLE_RECORD+"},"
@@ -103,25 +103,25 @@
   }
 
   @Test public void testNamedRecord() throws Exception {
-	    String s = 
-	      "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
-	      +"{\"name\":\"R1\",\"type\":"+SIMPLE_RECORD+"},"
-	      +"{\"name\":\"R2\",\"type\":\"R\"}"
-	      +"]}";
-	    check(Schema.parse(s),
-	          new ColumnMetaData("R1#x", ValueType.INT),
-	          new ColumnMetaData("R1#y", ValueType.STRING),
-	          new ColumnMetaData("R2#x", ValueType.INT),
-	          new ColumnMetaData("R2#y", ValueType.STRING));
-	  }
-  
+    String s =
+      "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
+      +"{\"name\":\"R1\",\"type\":"+SIMPLE_RECORD+"},"
+      +"{\"name\":\"R2\",\"type\":\"R\"}"
+      +"]}";
+    check(Schema.parse(s),
+      new ColumnMetaData("R1#x", ValueType.INT),
+      new ColumnMetaData("R1#y", ValueType.STRING),
+      new ColumnMetaData("R2#x", ValueType.INT),
+      new ColumnMetaData("R2#y", ValueType.STRING));
+  }
+
   @Test public void testSimpleArray() throws Exception {
     String s = "{\"type\":\"array\",\"items\":\"long\"}";
     check(Schema.parse(s),
           new ColumnMetaData("[]", ValueType.LONG).isArray(true));
   }
 
-  private static final String RECORD_ARRAY = 
+  private static final String RECORD_ARRAY =
     "{\"type\":\"array\",\"items\":"+SIMPLE_RECORD+"}";
 
   @Test public void testArray() throws Exception {
@@ -157,7 +157,7 @@
   }
 
   @Test public void testNestedArray() throws Exception {
-    String s = 
+    String s =
       "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
       +"{\"name\":\"x\",\"type\":\"int\"},"
       +"{\"name\":\"A\",\"type\":"+RECORD_ARRAY+"},"
@@ -173,7 +173,7 @@
   }
 
   @Test public void testNestedUnion() throws Exception {
-    String s = 
+    String s =
       "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
       +"{\"name\":\"x\",\"type\":\"int\"},"
       +"{\"name\":\"u\",\"type\":"+UNION+"},"
@@ -190,7 +190,7 @@
   }
 
   @Test public void testUnionInArray() throws Exception {
-    String s = 
+    String s =
       "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
       +"{\"name\":\"a\",\"type\":{\"type\":\"array\",\"items\":"+UNION+"}}"
       +"]}";
@@ -209,7 +209,7 @@
   }
 
   @Test public void testArrayInUnion() throws Exception {
-    String s = 
+    String s =
       "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
       +"{\"name\":\"a\",\"type\":[\"int\","+RECORD_ARRAY+"]}]}";
     ColumnMetaData q = new ColumnMetaData("a/array",ValueType.NULL)
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestWordCount.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestWordCount.java
index d928a9f..79a8fff 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestWordCount.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestWordCount.java
@@ -62,7 +62,7 @@
         collector.collect(new Pair<String,Long>(tokens.nextToken(),1L));
     }
   }
-  
+
   public static class ReduceImpl
     extends AvroReducer<String, Long, Pair<String, Long> > {
     @Override
@@ -74,7 +74,7 @@
         sum += count;
       collector.collect(new Pair<String,Long>(word, sum));
     }
-  }    
+  }
 
   @Test public void runTestsInOrder() throws Exception {
     testOutputFormat();
@@ -87,26 +87,26 @@
 
   public void testOutputFormat() throws Exception {
     JobConf job = new JobConf();
-    
+
     WordCountUtil wordCountUtil = new WordCountUtil("trevniMapredTest");
-    
+
     wordCountUtil.writeLinesFile();
-    
+
     AvroJob.setInputSchema(job, STRING);
     AvroJob.setOutputSchema(job, Pair.getPairSchema(STRING,LONG));
-    
-    AvroJob.setMapperClass(job, MapImpl.class);        
+
+    AvroJob.setMapperClass(job, MapImpl.class);
     AvroJob.setCombinerClass(job, ReduceImpl.class);
     AvroJob.setReducerClass(job, ReduceImpl.class);
-    
+
     FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/in"));
     FileOutputFormat.setOutputPath(job, new Path(wordCountUtil.getDir().toString() + "/out"));
     FileOutputFormat.setCompressOutput(job, true);
-    
+
     job.setOutputFormat(AvroTrevniOutputFormat.class);
 
     JobClient.runJob(job);
-    
+
     wordCountUtil.validateCountsFile();
   }
 
@@ -118,20 +118,20 @@
       total += (Long)r.get("value");
     }
   }
-  
+
   public void testInputFormat() throws Exception {
     JobConf job = new JobConf();
 
     WordCountUtil wordCountUtil = new WordCountUtil("trevniMapredTest");
-    
-    
+
+
     Schema subSchema = Schema.parse("{\"type\":\"record\"," +
                                     "\"name\":\"PairValue\","+
-                                    "\"fields\": [ " + 
-                                    "{\"name\":\"value\", \"type\":\"long\"}" + 
+                                    "\"fields\": [ " +
+                                    "{\"name\":\"value\", \"type\":\"long\"}" +
                                     "]}");
     AvroJob.setInputSchema(job, subSchema);
-    AvroJob.setMapperClass(job, Counter.class);        
+    AvroJob.setMapperClass(job, Counter.class);
     FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/out/*"));
     job.setInputFormat(AvroTrevniInputFormat.class);
 
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/WordCountUtil.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/WordCountUtil.java
index 68af7a3..775992a 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/WordCountUtil.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/WordCountUtil.java
@@ -60,13 +60,13 @@
   public WordCountUtil (String testName) {
     this(testName, "part-00000");
   }
-  
+
   public WordCountUtil (String testName, String partDirName) {
     dir = new File("target/wc", testName);
     linesFiles = new File(new File(dir, "in"), "lines.avro");
     countFiles = new File(new File(dir, "out"), partDirName + "/part-0.trv");
   }
-  
+
   public static final String[] LINES = new String[] {
     "the quick brown fox jumps over the lazy dog",
     "the cow jumps over the moon",
@@ -93,7 +93,7 @@
   public File getDir() {
     return dir;
   }
-  
+
   public void writeLinesFile() throws IOException {
     FileUtil.fullyDelete(dir);
     DatumWriter<String> writer = new GenericDatumWriter<String>();
@@ -117,7 +117,7 @@
     reader.close();
     assertEquals(COUNTS.size(), numWords);
   }
-  
+
   public void validateCountsFileGenericRecord() throws Exception {
     AvroColumnReader<GenericRecord > reader =
       new AvroColumnReader<GenericRecord >
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyValueWordCount.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyValueWordCount.java
index deea1ca..47cbee4 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyValueWordCount.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyValueWordCount.java
@@ -43,13 +43,13 @@
 import org.junit.Test;
 
 public class TestKeyValueWordCount {
-  
+
   private static long total = 0;
 
   static final Schema STRING = Schema.create(Schema.Type.STRING);
   static { GenericData.setStringType(STRING, GenericData.StringType.String); }
   static final Schema LONG = Schema.create(Schema.Type.LONG);
-  
+
   private static class WordCountMapper extends
       Mapper<AvroKey<String>, NullWritable, Text, LongWritable> {
     private LongWritable mCount = new LongWritable();
@@ -76,12 +76,12 @@
 
     }
   }
-  
+
   private static class WordCountReducer extends Reducer< Text, LongWritable, AvroKey<String>, AvroValue<Long>> {
-    
+
     AvroKey<String> resultKey = new AvroKey<String>();
     AvroValue<Long> resultValue = new AvroValue<Long>();
-    
+
     @Override
     protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
       long sum = 0;
@@ -90,11 +90,11 @@
       }
       resultKey.datum(key.toString());
       resultValue.datum(sum);
-      
+
       context.write(resultKey, resultValue);
     }
   }
-   
+
   public static class Counter extends
   Mapper<AvroKey<String>, AvroValue<Long>, NullWritable, NullWritable> {
     @Override
@@ -102,8 +102,8 @@
         throws IOException, InterruptedException {
       total += value.datum();
     }
-  }  
-  
+  }
+
   @Test public void testIOFormat() throws Exception {
     checkOutputFormat();
     checkInputFormat();
@@ -111,49 +111,49 @@
 
   public void checkOutputFormat() throws Exception {
     Job job = new Job();
-    
+
     WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyValueTest", "part-r-00000");
-     
+
     wordCountUtil.writeLinesFile();
-    
+
     AvroJob.setInputKeySchema(job, STRING);
     AvroJob.setOutputKeySchema(job, STRING);
     AvroJob.setOutputValueSchema(job, LONG);
-    
+
     job.setMapperClass(WordCountMapper.class);
     job.setReducerClass(WordCountReducer.class);
-    
+
     job.setMapOutputKeyClass(Text.class);
     job.setMapOutputValueClass(LongWritable.class);
-    
+
     FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/in"));
     FileOutputFormat.setOutputPath(job, new Path(wordCountUtil.getDir().toString() + "/out"));
     FileOutputFormat.setCompressOutput(job, true);
-    
+
     job.setInputFormatClass(AvroKeyInputFormat.class);
     job.setOutputFormatClass(AvroTrevniKeyValueOutputFormat.class);
 
     job.waitForCompletion(true);
-    
+
     wordCountUtil.validateCountsFileGenericRecord();
   }
-  
+
   public void checkInputFormat() throws Exception {
     Job job = new Job();
-    
+
     WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyValueTest");
-    
+
     job.setMapperClass(Counter.class);
-    
+
     FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/out/*"));
     job.setInputFormatClass(AvroTrevniKeyValueInputFormat.class);
-    
+
     job.setNumReduceTasks(0);
     job.setOutputFormatClass(NullOutputFormat.class);
-    
+
     total = 0;
     job.waitForCompletion(true);
     assertEquals(WordCountUtil.TOTAL, total);
-    
+
   }
 }
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyWordCount.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyWordCount.java
index 8623fb4..1ba50ff 100644
--- a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyWordCount.java
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyWordCount.java
@@ -45,14 +45,14 @@
 import org.junit.Test;
 
 public class TestKeyWordCount {
-  
+
   private static long total = 0;
 
   static final Schema STRING = Schema.create(Schema.Type.STRING);
   static { GenericData.setStringType(STRING, GenericData.StringType.String); }
   static final Schema LONG = Schema.create(Schema.Type.LONG);
-  
-  
+
+
   private static class WordCountMapper extends
       Mapper<AvroKey<String>, NullWritable, Text, LongWritable> {
     private LongWritable mCount = new LongWritable();
@@ -79,33 +79,33 @@
 
     }
   }
-  
+
   private static class WordCountReducer extends Reducer< Text, LongWritable, AvroKey<GenericData.Record>, NullWritable> {
-    
+
     private AvroKey<GenericData.Record> result ;
-    
+
     @Override
     protected void setup(Context context) {
       result = new AvroKey<GenericData.Record>();
       result.datum(new Record(Pair.getPairSchema(STRING,LONG)));
     }
-    
+
     @Override
     protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
       long count = 0;
       for (LongWritable value: values) {
         count += value.get();
       }
-      
+
       result.datum().put("key", key.toString());
       result.datum().put("value", count);
-      
+
       context.write(result, NullWritable.get());
     }
   }
-   
 
-  
+
+
   public static class Counter extends
   Mapper<AvroKey<GenericData.Record>, NullWritable, NullWritable, NullWritable> {
     @Override
@@ -114,8 +114,8 @@
       total += (Long)key.datum().get("value");
     }
   }
-  
-  
+
+
   @Test public void testIOFormat() throws Exception {
     checkOutputFormat();
     checkInputFormat();
@@ -123,56 +123,56 @@
 
   public void checkOutputFormat() throws Exception {
     Job job = new Job();
-    
+
     WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyTest", "part-r-00000");
-    
+
     wordCountUtil.writeLinesFile();
-    
+
     AvroJob.setInputKeySchema(job, STRING);
     AvroJob.setOutputKeySchema(job, Pair.getPairSchema(STRING,LONG));
-    
+
     job.setMapperClass(WordCountMapper.class);
     job.setReducerClass(WordCountReducer.class);
-    
+
     job.setMapOutputKeyClass(Text.class);
     job.setMapOutputValueClass(LongWritable.class);
-    
+
     FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/in"));
     FileOutputFormat.setOutputPath(job, new Path(wordCountUtil.getDir().toString() + "/out"));
     FileOutputFormat.setCompressOutput(job, true);
-    
+
     job.setInputFormatClass(AvroKeyInputFormat.class);
     job.setOutputFormatClass(AvroTrevniKeyOutputFormat.class);
 
     job.waitForCompletion(true);
-    
+
     wordCountUtil.validateCountsFile();
   }
-  
+
   public void checkInputFormat() throws Exception {
     Job job = new Job();
-    
+
     WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyTest");
-    
+
     job.setMapperClass(Counter.class);
 
     Schema subSchema = Schema.parse("{\"type\":\"record\"," +
                                     "\"name\":\"PairValue\","+
-                                    "\"fields\": [ " + 
-                                    "{\"name\":\"value\", \"type\":\"long\"}" + 
+                                    "\"fields\": [ " +
+                                    "{\"name\":\"value\", \"type\":\"long\"}" +
                                     "]}");
     AvroJob.setInputKeySchema(job, subSchema);
-    
+
     FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/out/*"));
     job.setInputFormatClass(AvroTrevniKeyInputFormat.class);
-    
+
     job.setNumReduceTasks(0);
     job.setOutputFormatClass(NullOutputFormat.class);
-    
+
     total = 0;
     job.waitForCompletion(true);
     assertEquals(WordCountUtil.TOTAL, total);
-    
+
   }
-  
+
 }
diff --git a/lang/java/trevni/core/pom.xml b/lang/java/trevni/core/pom.xml
index 8e96aa9..084b167 100644
--- a/lang/java/trevni/core/pom.xml
+++ b/lang/java/trevni/core/pom.xml
@@ -37,7 +37,7 @@
       <artifactId>snappy-java</artifactId>
       <version>${snappy.version}</version>
       <scope>compile</scope>
-    </dependency>    
+    </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-compress</artifactId>
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/BZip2Codec.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/BZip2Codec.java
index 1c0d64b..9513ed4 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/BZip2Codec.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/BZip2Codec.java
@@ -29,7 +29,7 @@
 
   private ByteArrayOutputStream outputBuffer;
   public static final int DEFAULT_BUFFER_SIZE = 64 * 1024;
-  
+
   @Override
   ByteBuffer compress(ByteBuffer uncompressedData) throws IOException {
     ByteArrayOutputStream baos = getOutputBuffer(uncompressedData.remaining());
@@ -55,23 +55,23 @@
       byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
 
       int readCount = -1;
-      
+
       while ( (readCount = inputStream.read(buffer, compressedData.position(), buffer.length))> 0) {
         baos.write(buffer, 0, readCount);
       }
-      
+
       ByteBuffer result = ByteBuffer.wrap(baos.toByteArray());
       return result;
     } finally {
       inputStream.close();
     }
   }
-  
+
   private ByteArrayOutputStream getOutputBuffer(int suggestedLength) {
     if (null == outputBuffer)
       outputBuffer = new ByteArrayOutputStream(suggestedLength);
     outputBuffer.reset();
     return outputBuffer;
   }
-  
+
 }
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/BlockDescriptor.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/BlockDescriptor.java
index 5f2c0f3..cbb15bd 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/BlockDescriptor.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/BlockDescriptor.java
@@ -25,13 +25,13 @@
   int compressedSize;
 
   BlockDescriptor() {}
-  
+
   BlockDescriptor(int rowCount, int uncompressedSize, int compressedSize) {
     this.rowCount = rowCount;
     this.uncompressedSize = uncompressedSize;
     this.compressedSize = compressedSize;
   }
-  
+
   public void writeTo(OutputBuffer out) throws IOException {
     out.writeFixed32(rowCount);
     out.writeFixed32(uncompressedSize);
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnDescriptor.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnDescriptor.java
index 6a052d1..3cbabac 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnDescriptor.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnDescriptor.java
@@ -76,7 +76,7 @@
         firstValues[i] = in.<T>readValue(metaData.getType());
     }
     dataStart = in.tell();
-    
+
     // compute blockStarts and firstRows
     Checksum checksum = Checksum.get(metaData);
     blockStarts = new long[blocks.length];
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileReader.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileReader.java
index ec080b8..cb475f5 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileReader.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileReader.java
@@ -131,7 +131,7 @@
     for (int i = 0; i < columnCount; i++)
       columns[i].start = in.readFixed64();
   }
- 
+
   /** Return an iterator over values in the named column. */
   public <T extends Comparable> ColumnValues<T> getValues(String columnName)
     throws IOException {
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileWriter.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileWriter.java
index 0f4a21e..5eb92aa 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileWriter.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileWriter.java
@@ -68,7 +68,7 @@
       if (parent != null && !seen.contains(parent.getName()))
         throw new TrevniRuntimeException("Parent must precede child: "+name);
       seen.add(name);
-    }          
+    }
   }
 
   void incrementSize(int n) { size += n; }
@@ -133,7 +133,7 @@
   /** Write all rows added to the named output stream. */
   public void writeTo(OutputStream out) throws IOException {
     writeHeader(out);
-    
+
     for (int column = 0; column < columnCount; column++)
       columns[column].writeTo(out);
   }
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnOutputBuffer.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnOutputBuffer.java
index b689915..3217753 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnOutputBuffer.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnOutputBuffer.java
@@ -90,9 +90,9 @@
          ? firstValues.get(firstValues.size()-1).length
          : 0)
       + data.position();                         // data
-    
+
     writer.incrementSize(sizeIncrement);
-    size += sizeIncrement;                         
+    size += sizeIncrement;
 
     buffer = new OutputBuffer();
     rowCount = 0;
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnValues.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnValues.java
index a47fc85..9df153f 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnValues.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnValues.java
@@ -34,7 +34,7 @@
   private InputBuffer values;
   private int block = -1;
   private long row = 0;
-  private T previous; 
+  private T previous;
 
   private int arrayLength;
 
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/DeflateCodec.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/DeflateCodec.java
index 7e9a0be..96077bb 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/DeflateCodec.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/DeflateCodec.java
@@ -45,13 +45,13 @@
     writeAndClose(data, new InflaterOutputStream(baos, getInflater()));
     return ByteBuffer.wrap(baos.toByteArray());
   }
-  
+
   private void writeAndClose(ByteBuffer data, OutputStream out)
     throws IOException {
     out.write(data.array(), data.position(), data.remaining());
     out.close();
   }
-  
+
   private Inflater getInflater() {
     if (null == inflater)
       inflater = new Inflater(true);
@@ -65,7 +65,7 @@
     deflater.reset();
     return deflater;
   }
-  
+
   private ByteArrayOutputStream getOutputBuffer(int suggestedLength) {
     if (null == outputBuffer)
       outputBuffer = new ByteArrayOutputStream(suggestedLength);
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/InputBuffer.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/InputBuffer.java
index e3bd415..4023731 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/InputBuffer.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/InputBuffer.java
@@ -35,7 +35,7 @@
   private int limit;                              // end of valid buffer data
 
   private CharsetDecoder utf8 = Charset.forName("UTF-8").newDecoder();
-  
+
   private int bitCount;                           // position in booleans
 
   private int runLength;                          // length of run
@@ -233,7 +233,7 @@
     }
     return (l >>> 1) ^ -(l & 1); // back to two's-complement
   }
-  
+
   // splitting readLong up makes it faster because of the JVM does more
   // optimizations on small methods
   private long innerLongDecode(long l) throws IOException {
@@ -302,7 +302,7 @@
     byte[] bytes = new byte[length];
     readFully(bytes, 0, length);
     return utf8.decode(ByteBuffer.wrap(bytes, 0, length)).toString();
-  }  
+  }
 
   public byte[] readBytes() throws IOException {
     byte[] result = new byte[readInt()];
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/MetaData.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/MetaData.java
index 648d8a1..08d0072 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/MetaData.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/MetaData.java
@@ -44,7 +44,7 @@
     setReserved(CODEC_KEY, codec);
     return (T)this;
   }
-   
+
   /** Return the checksum algorithm name. */
   public String getChecksum() { return getString(CHECKSUM_KEY); }
 
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java
index 41174fb..0bba8ac 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java
@@ -98,7 +98,7 @@
     int len = bytes.limit() - pos;
     writeBytes(bytes.array(), start, len);
   }
-  
+
   public void writeBytes(byte[] bytes) throws IOException {
     writeBytes(bytes, 0, bytes.length);
   }
@@ -158,7 +158,7 @@
           }
         }
       }
-    } 
+    }
     buf[count++] = (byte) n;
   }
 
@@ -203,7 +203,7 @@
     }
     buf[count++] = (byte) n;
   }
-  
+
   private void ensure(int n) {
     if (count + n > buf.length)
       buf = Arrays.copyOf(buf, Math.max(buf.length << 1, count + n));
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ValueType.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ValueType.java
index 712a7d9..dddcb5a 100644
--- a/lang/java/trevni/core/src/main/java/org/apache/trevni/ValueType.java
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ValueType.java
@@ -30,5 +30,5 @@
   public static ValueType forName(String name) {
     return valueOf(name.toUpperCase());
   }
- 
+
 }
diff --git a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestBZip2Codec.java b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestBZip2Codec.java
index c4a39de..ac7c81b 100644
--- a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestBZip2Codec.java
+++ b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestBZip2Codec.java
@@ -25,42 +25,42 @@
 import org.junit.Test;
 
 public class TestBZip2Codec {
-  
+
   @Test
   public void testBZip2CompressionAndDecompression() throws IOException {
-    
+
     MetaData meta = new MetaData();
     meta.setCodec("bzip2");
     Codec codec = Codec.get(meta);
-    
+
     //Confirm that the right codec Came back
     assertTrue(codec instanceof BZip2Codec);
-    
+
     //This is 3 times the byte buffer on the BZip2 decompress plus some extra
     final int inputByteSize = BZip2Codec.DEFAULT_BUFFER_SIZE * 3 + 42;
-    
+
     byte[] inputByteArray = new byte[inputByteSize];
-    
+
     //Generate something that will compress well
     for (int i = 0; i < inputByteSize; i++) {
       inputByteArray[i] = (byte)(65 + i % 10);
     }
-    
+
     ByteBuffer inputByteBuffer = ByteBuffer.wrap(inputByteArray);
-    
+
     ByteBuffer compressedBuffer = codec.compress(inputByteBuffer);
-    
+
     //Make sure something returned
     assertTrue(compressedBuffer.array().length > 0);
     //Make sure the compressed output is smaller then the original
     assertTrue(compressedBuffer.array().length < inputByteArray.length);
-    
+
     ByteBuffer decompressedBuffer = codec.decompress(compressedBuffer);
-    
+
     //The original array should be the same length as the decompressed array
     assertTrue(decompressedBuffer.array().length == inputByteArray.length);
-    
-    //Every byte in the outputByteArray should equal every byte in the input array 
+
+    //Every byte in the outputByteArray should equal every byte in the input array
     byte[] outputByteArray = decompressedBuffer.array();
     for (int i = 0; i < inputByteSize; i++) {
       inputByteArray[i] = outputByteArray[i];
diff --git a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java
index 707848e..a0b1068 100644
--- a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java
+++ b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java
@@ -52,7 +52,7 @@
     OutputBuffer out = new OutputBuffer();
     for (int i = 0; i < COUNT; i++)
       out.writeValue(random.nextBoolean(), ValueType.BOOLEAN);
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     random = TestUtil.createRandom();
     for (int i = 0; i < COUNT; i++)
@@ -65,7 +65,7 @@
     OutputBuffer out = new OutputBuffer();
     for (int i = 0; i < COUNT; i++)
       out.writeInt(random.nextInt());
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     random = TestUtil.createRandom();
     for (int i = 0; i < COUNT; i++)
@@ -77,7 +77,7 @@
     OutputBuffer out = new OutputBuffer();
     for (int i = 0; i < COUNT; i++)
       out.writeLong(random.nextLong());
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     random = TestUtil.createRandom();
     for (int i = 0; i < COUNT; i++)
@@ -89,7 +89,7 @@
     OutputBuffer out = new OutputBuffer();
     for (int i = 0; i < COUNT; i++)
       out.writeFixed32(random.nextInt());
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     random = TestUtil.createRandom();
     for (int i = 0; i < COUNT; i++)
@@ -101,41 +101,41 @@
     OutputBuffer out = new OutputBuffer();
     for (int i = 0; i < COUNT; i++)
       out.writeFixed64(random.nextLong());
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     random = TestUtil.createRandom();
     for (int i = 0; i < COUNT; i++)
       Assert.assertEquals(random.nextLong(), in.readFixed64());
   }
-  
+
   @Test public void testFloat() throws Exception {
     Random random = TestUtil.createRandom();
     OutputBuffer out = new OutputBuffer();
     for (int i = 0; i < COUNT; i++)
       out.writeFloat(random.nextFloat());
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     random = TestUtil.createRandom();
     for (int i = 0; i < COUNT; i++)
       Assert.assertEquals(random.nextFloat(), in.readFloat(), 0);
   }
-  
+
   @Test public void testDouble() throws Exception {
     OutputBuffer out = new OutputBuffer();
     for (int i = 0; i < COUNT; i++)
       out.writeDouble(Double.MIN_VALUE);
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     for (int i = 0; i < COUNT; i++)
       Assert.assertEquals(Double.MIN_VALUE, in.readDouble(), 0);
   }
-  
+
   @Test public void testBytes() throws Exception {
     Random random = TestUtil.createRandom();
     OutputBuffer out = new OutputBuffer();
     for (int i = 0; i < COUNT; i++)
       out.writeBytes(TestUtil.randomBytes(random));
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     random = TestUtil.createRandom();
     for (int i = 0; i < COUNT; i++)
@@ -147,7 +147,7 @@
     OutputBuffer out = new OutputBuffer();
     for (int i = 0; i < COUNT; i++)
       out.writeString(TestUtil.randomString(random));
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     random = TestUtil.createRandom();
     for (int i = 0; i < COUNT; i++)
@@ -158,7 +158,7 @@
     OutputBuffer out = new OutputBuffer();
     out.writeValue(null, ValueType.NULL);
     out.writeLong(sentinel);
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     in.skipValue(ValueType.NULL);
     Assert.assertEquals(sentinel, in.readLong());
@@ -168,7 +168,7 @@
     OutputBuffer out = new OutputBuffer();
     out.writeValue(false, ValueType.BOOLEAN);
     out.writeLong(sentinel);
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     in.skipValue(ValueType.BOOLEAN);
     Assert.assertEquals(sentinel, in.readLong());
@@ -178,7 +178,7 @@
     OutputBuffer out = new OutputBuffer();
     out.writeValue(Integer.MAX_VALUE, ValueType.INT);
     out.writeLong(sentinel);
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     in.skipValue(ValueType.INT);
     Assert.assertEquals(sentinel, in.readLong());
@@ -188,7 +188,7 @@
     OutputBuffer out = new OutputBuffer();
     out.writeValue(Long.MAX_VALUE, ValueType.LONG);
     out.writeLong(sentinel);
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     in.skipValue(ValueType.LONG);
     Assert.assertEquals(sentinel, in.readLong());
@@ -198,7 +198,7 @@
     OutputBuffer out = new OutputBuffer();
     out.writeValue(Integer.MAX_VALUE, ValueType.FIXED32);
     out.writeLong(sentinel);
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     in.skipValue(ValueType.LONG);
     Assert.assertEquals(sentinel, in.readLong());
@@ -208,7 +208,7 @@
     OutputBuffer out = new OutputBuffer();
     out.writeValue(Long.MAX_VALUE, ValueType.FIXED64);
     out.writeLong(sentinel);
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     in.skipValue(ValueType.LONG);
     Assert.assertEquals(sentinel, in.readLong());
@@ -218,7 +218,7 @@
     OutputBuffer out = new OutputBuffer();
     out.writeValue(Float.MAX_VALUE, ValueType.FLOAT);
     out.writeLong(sentinel);
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     in.skipValue(ValueType.FLOAT);
     Assert.assertEquals(sentinel, in.readLong());
@@ -228,7 +228,7 @@
     OutputBuffer out = new OutputBuffer();
     out.writeValue(Double.MAX_VALUE, ValueType.DOUBLE);
     out.writeLong(sentinel);
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     in.skipValue(ValueType.DOUBLE);
     Assert.assertEquals(sentinel, in.readLong());
@@ -238,7 +238,7 @@
     OutputBuffer out = new OutputBuffer();
     out.writeValue("trevni", ValueType.STRING);
     out.writeLong(sentinel);
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     in.skipValue(ValueType.STRING);
     Assert.assertEquals(sentinel, in.readLong());
@@ -248,7 +248,7 @@
     OutputBuffer out = new OutputBuffer();
     out.writeValue("trevni".getBytes(), ValueType.BYTES);
     out.writeLong(sentinel);
-    
+
     InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
     in.skipValue(ValueType.BYTES);
     Assert.assertEquals(sentinel, in.readLong());
diff --git a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestInputBytes.java b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestInputBytes.java
index c4cb2aa..c55b532 100644
--- a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestInputBytes.java
+++ b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestInputBytes.java
@@ -38,7 +38,7 @@
     random.nextBytes(data);
 
     Input in = new InputBytes(data);
-      
+
     for (int i = 0; i < COUNT; i++) {
       int p = random.nextInt(length);
       int l = Math.min(random.nextInt(SIZE/10), length-p);
diff --git a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestUtil.java b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestUtil.java
index ab4796d..22bbae9 100644
--- a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestUtil.java
+++ b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestUtil.java
@@ -39,7 +39,7 @@
       String configured = System.getProperty("test.seed");
       if (configured != null)
         seed = Long.valueOf(configured);
-      else 
+      else
         seed = System.currentTimeMillis();
       System.err.println("test.seed="+seed);
       seedSet = true;
diff --git a/lang/py/build.xml b/lang/py/build.xml
index 61c3f4c..5ef5214 100644
--- a/lang/py/build.xml
+++ b/lang/py/build.xml
@@ -17,7 +17,7 @@
 -->
 
 <project name="Avro" default="dist" xmlns:ivy="antlib:org.apache.ivy.ant">
- 
+
   <!-- Load user's default properties. -->
   <property file="${user.home}/build.properties"/>
 
@@ -66,7 +66,7 @@
       </classpath>
     </typedef>
   </target>
-  
+
   <target name="ivy-download" unless="ivy.jar.found" >
     <get src="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar" dest="${ivy.jar}" usetimestamp="true" />
   </target>
@@ -79,13 +79,13 @@
       <fileset dir="${src.dir}">
         <exclude name="**/*.pyc"/>
         <exclude name="**/*.py~"/>
-      </fileset> 
+      </fileset>
     </copy>
     <copy todir="${build.dir}/test">
       <fileset dir="${test.dir}">
         <exclude name="**/*.pyc"/>
         <exclude name="**/*.py~"/>
-      </fileset> 
+      </fileset>
     </copy>
     <copy todir="${build.dir}/lib">
       <fileset dir="${lib.dir}" />
@@ -102,9 +102,9 @@
           toFile="${build.dir}/src/avro/ipc.py"
           overwrite="true">
       <filterset>
-        <filter token="HANDSHAKE_REQUEST_SCHEMA" 
+        <filter token="HANDSHAKE_REQUEST_SCHEMA"
           value="${handshake.request.json}"/>
-        <filter token="HANDSHAKE_RESPONSE_SCHEMA" 
+        <filter token="HANDSHAKE_RESPONSE_SCHEMA"
           value="${handshake.response.json}"/>
      </filterset>
     </copy>
diff --git a/lang/py/ivysettings.xml b/lang/py/ivysettings.xml
index 31de16e..22104c7 100644
--- a/lang/py/ivysettings.xml
+++ b/lang/py/ivysettings.xml
@@ -19,8 +19,8 @@
   <property name="m2-pattern" value="${user.home}/.m2/repository/[organisation]/[module]/[revision]/[module]-[revision](-[classifier]).[ext]" override="false" />
   <resolvers>
     <chain name="repos">
-      <ibiblio name="central" m2compatible="true"/>   
-      <ibiblio name="apache-snapshots" m2compatible="true" root="https://repository.apache.org/content/groups/snapshots"/> 
+      <ibiblio name="central" m2compatible="true"/>
+      <ibiblio name="apache-snapshots" m2compatible="true" root="https://repository.apache.org/content/groups/snapshots"/>
       <filesystem name="local-maven2" m2compatible="true"> <!-- needed when building non-snapshot version for release -->
         <artifact pattern="${m2-pattern}"/>
         <ivy pattern="${m2-pattern}"/>
diff --git a/lang/py3/scripts/avro b/lang/py3/scripts/avro
old mode 100644
new mode 100755
diff --git a/share/test/interop/bin/test_rpc_interop.sh b/share/test/interop/bin/test_rpc_interop.sh
index 75681be..20ee77f 100755
--- a/share/test/interop/bin/test_rpc_interop.sh
+++ b/share/test/interop/bin/test_rpc_interop.sh
@@ -15,13 +15,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-set -e						  # exit on error
+set -e                          # exit on error
 
-cd `dirname "$0"`/../../../..			  # connect to root
+cd `dirname "$0"`/../../../..   # connect to root
 
 VERSION=`cat share/VERSION.txt`
 
-#set -x						  # echo commands
+#set -x                          # echo commands
 
 java_client="java -jar lang/java/tools/target/avro-tools-$VERSION.jar rpcsend"
 java_server="java -jar lang/java/tools/target/avro-tools-$VERSION.jar rpcreceive"
@@ -32,7 +32,7 @@
 ruby_client="ruby -rubygems -Ilang/ruby/lib lang/ruby/test/tool.rb rpcsend"
 ruby_server="ruby -rubygems -Ilang/ruby/lib lang/ruby/test/tool.rb rpcreceive"
 
-export PYTHONPATH=lang/py/build/src	  # path to avro Python module
+export PYTHONPATH=lang/py/build/src      # path to avro Python module
 
 clients=("$java_client" "$py_client" "$ruby_client")
 servers=("$java_server" "$py_server" "$ruby_server")
@@ -42,41 +42,41 @@
 portfile=/tmp/interop_$$
 
 function cleanup() {
-    rm -rf $portfile
-    for job in `jobs -p` ; do kill $job; done
+  rm -rf $portfile
+  for job in `jobs -p` ; do kill $job; done
 }
 
 trap 'cleanup' EXIT
 
 for server in "${servers[@]}"
 do
-    for msgDir in share/test/interop/rpc/*
+  for msgDir in share/test/interop/rpc/*
+  do
+    msg=`basename "$msgDir"`
+    for c in ${msgDir}/*
     do
-	msg=`basename "$msgDir"`
-	for c in ${msgDir}/*
-	do
-	    echo TEST: $c
-	    for client in "${clients[@]}"
-	    do
+      echo TEST: $c
+      for client in "${clients[@]}"
+      do
         rm -rf $portfile
-		$server http://127.0.0.1:0/ $proto $msg -file $c/response.avro \
-		    > $portfile &
+        $server http://127.0.0.1:0/ $proto $msg -file $c/response.avro \
+            > $portfile &
         count=0
         while [ ! -s $portfile ]
         do
-            sleep 1
-            if [ $count -ge 10 ]
-            then
-                echo $server did not start.
-                exit 1
-            fi
-            count=`expr $count + 1`
+          sleep 1
+          if [ $count -ge 10 ]
+          then
+            echo $server did not start.
+            exit 1
+          fi
+          count=`expr $count + 1`
         done
-		read ignore port < $portfile
-	    	$client http://127.0.0.1:$port $proto $msg -file $c/request.avro
-		wait
-	    done
-	done
+        read ignore port < $portfile
+        $client http://127.0.0.1:$port $proto $msg -file $c/request.avro
+        wait
+        done
+    done
     done
 done