MAPREDUCE-1867. Remove unused methods in org.apache.hadoop.streaming.StreamUtil. Contributed by Amareshwari Sriramadasu.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/mapreduce/trunk@1027685 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/CHANGES.txt b/CHANGES.txt
index 711e138..33c913a 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -347,6 +347,9 @@
     MAPREDUCE-2143.  HarFileSystem is able to handle spaces in pathnames.
     (Ramkumar Vadali via dhruba)
 
+    MAPREDUCE-1867.  Remove unused methods in
+    org.apache.hadoop.streaming.StreamUtil.  (amareshwari via tomwhite)
+
 Release 0.21.1 - Unreleased
 
   NEW FEATURES
diff --git a/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java b/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java
index 384b297..f8b45f3 100644
--- a/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java
+++ b/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java
@@ -22,14 +22,19 @@
 import java.net.InetAddress;
 import java.util.*;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * This is a class used to get the current environment
  * on the host machines running the map/reduce. This class
  * assumes that setting the environment in streaming is 
  * allowed on windows/ix/linuz/freebsd/sunos/solaris/hp-ux
  */
+@InterfaceAudience.Private
 public class Environment extends Properties {
 
+  private static final long serialVersionUID = 1L;
+
   public Environment() throws IOException {
     // Extend this code to fit all operating
     // environments that you expect to run in
@@ -78,7 +83,7 @@
   // to be used with Runtime.exec(String[] cmdarray, String[] envp) 
   String[] toArray() {
     String[] arr = new String[super.size()];
-    Enumeration it = super.keys();
+    Enumeration<Object> it = super.keys();
     int i = -1;
     while (it.hasMoreElements()) {
       String key = (String) it.nextElement();
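
For reference, a minimal standalone sketch of the toArray() pattern touched above, using the same Enumeration<Object> typing so no raw-type warning is produced. The property names are made up for the example.

    import java.util.Enumeration;
    import java.util.Properties;

    public class EnvToArrayExample {
      public static void main(String[] args) {
        Properties env = new Properties();
        env.setProperty("PATH", "/usr/bin");
        env.setProperty("HOME", "/home/user");

        String[] arr = new String[env.size()];
        Enumeration<Object> it = env.keys();    // typed, as in the hunk above
        int i = -1;
        while (it.hasMoreElements()) {
          String key = (String) it.nextElement();
          String val = env.getProperty(key);
          arr[++i] = key + "=" + val;           // "name=value" form for Runtime.exec envp
        }
        for (String entry : arr) {
          System.out.println(entry);
        }
      }
    }
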
diff --git a/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java b/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java
index b720c64..e39df2b 100644
--- a/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java
+++ b/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java
@@ -19,36 +19,32 @@
 package org.apache.hadoop.streaming;
 
 import java.io.*;
-import java.util.*;
-import java.util.Map.Entry;
+
+import org.apache.hadoop.classification.InterfaceAudience;
 
 /**
- * Maps a relative pathname to an absolute pathname using the
- * PATH enviroment.
+ * Maps a relative pathname to an absolute pathname using the PATH environment.
  */
-public class PathFinder
-{
-  String pathenv;        // a string of pathnames
-  String pathSep;        // the path seperator
-  String fileSep;        // the file seperator in a directory
+@InterfaceAudience.Private
+public class PathFinder {
+  String pathenv; // a string of pathnames
+  String pathSep; // the path separator
+  String fileSep; // the file separator in a directory
 
   /**
-   * Construct a PathFinder object using the path from
-   * java.class.path
+   * Construct a PathFinder object using the path from java.class.path
    */
-  public PathFinder()
-  {
+  public PathFinder() {
     pathenv = System.getProperty("java.class.path");
     pathSep = System.getProperty("path.separator");
     fileSep = System.getProperty("file.separator");
   }
 
   /**
-   * Construct a PathFinder object using the path from
-   * the specified system environment variable.
+   * Construct a PathFinder object using the path from the specified system
+   * environment variable.
    */
-  public PathFinder(String envpath)
-  {
+  public PathFinder(String envpath) {
     pathenv = System.getenv(envpath);
     pathSep = System.getProperty("path.separator");
     fileSep = System.getProperty("file.separator");
@@ -57,85 +53,45 @@
   /**
    * Appends the specified component to the path list
    */
-  public void prependPathComponent(String str)
-  {
+  public void prependPathComponent(String str) {
     pathenv = str + pathSep + pathenv;
   }
 
   /**
-   * Returns the full path name of this file if it is listed in the
-   * path
+   * Returns the full path name of this file if it is listed in the path
    */
-  public File getAbsolutePath(String filename)
-  {
-    if (pathenv == null || pathSep == null  || fileSep == null)
-      {
-        return null;
-      }
-    int     val = -1;
-    String    classvalue = pathenv + pathSep;
+  public File getAbsolutePath(String filename) {
+    if (pathenv == null || pathSep == null || fileSep == null) {
+      return null;
+    }
+    int val = -1;
+    String classvalue = pathenv + pathSep;
 
-    while (((val = classvalue.indexOf(pathSep)) >= 0) &&
-           classvalue.length() > 0) {
-      //
+    while (((val = classvalue.indexOf(pathSep)) >= 0)
+        && classvalue.length() > 0) {
       // Extract each entry from the pathenv
-      //
       String entry = classvalue.substring(0, val).trim();
       File f = new File(entry);
 
-      try {
-        if (f.isDirectory()) {
-          //
-          // this entry in the pathenv is a directory.
-          // see if the required file is in this directory
-          //
-          f = new File(entry + fileSep + filename);
-        }
-        //
-        // see if the filename matches and  we can read it
-        //
-        if (f.isFile() && f.canRead()) {
-          return f;
-        }
-      } catch (Exception exp){ }
-      classvalue = classvalue.substring(val+1).trim();
+      if (f.isDirectory()) {
+        // this entry in the pathenv is a directory.
+        // see if the required file is in this directory
+        f = new File(entry + fileSep + filename);
+      }
+      // see if the filename matches and we can read it
+      if (f.isFile() && f.canRead()) {
+        return f;
+      }
+      classvalue = classvalue.substring(val + 1).trim();
     }
     return null;
   }
 
-  /**
-   * prints all environment variables for this process
-   */
-  private static void printEnvVariables() {
-    System.out.println("Environment Variables: ");
-    Map<String,String> map = System.getenv();
-    Set<Entry<String, String>> entrySet = map.entrySet();
-    for(Entry<String, String> entry : entrySet) {
-      System.out.println(entry.getKey() + " = " + entry.getValue());
-    }
-  }
-
-  /**
-   * prints all system properties for this process
-   */
-  private static void printSystemProperties() {
-    System.out.println("System properties: ");
-    java.util.Properties p = System.getProperties();
-    java.util.Enumeration keys = p.keys();
-    while(keys.hasMoreElements()) {
-      String thiskey = (String)keys.nextElement();
-      String value = p.getProperty(thiskey);
-      System.out.println(thiskey + " = " + value);
-    }
-  }
-
   public static void main(String args[]) throws IOException {
-
     if (args.length < 1) {
       System.out.println("Usage: java PathFinder <filename>");
       System.exit(1);
     }
-
     PathFinder finder = new PathFinder("PATH");
     File file = finder.getAbsolutePath(args[0]);
     if (file != null) {
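
As an illustration, the reworked PathFinder above can also be driven outside of its main(), resolving a command name against $PATH. This assumes the streaming jar is on the classpath; the command name and extra path entry are examples.

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.streaming.PathFinder;

    public class PathFinderDemo {
      public static void main(String[] args) throws IOException {
        PathFinder finder = new PathFinder("PATH");      // entries of $PATH
        finder.prependPathComponent("/usr/local/bin");   // tried before the rest
        File file = finder.getAbsolutePath("ls");        // example command name
        if (file != null) {
          System.out.println("Full path name = " + file.getCanonicalPath());
        } else {
          System.out.println("not found on PATH");
        }
      }
    }
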
diff --git a/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java b/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
index 52bf3f6..0fe9dbd 100644
--- a/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
+++ b/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
@@ -233,7 +233,6 @@
       LOG.info("JobConf set minRecWrittenToEnableSkip_ ="
           + minRecWrittenToEnableSkip_);
     }
-    taskId_ = StreamUtil.getTaskInfo(job_);
   }
 
   void addJobConfToEnvironment(JobConf conf, Properties env) {
@@ -611,7 +610,6 @@
 
   // set in PipeMapper/PipeReducer subclasses
   int numExceptions_;
-  StreamUtil.TaskId taskId_;
 
   protected volatile Throwable outerrThreadsThrowable;
 
diff --git a/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java b/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
index 6dcfeea..4b86a8d 100644
--- a/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
+++ b/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
@@ -66,6 +66,7 @@
 import org.apache.hadoop.streaming.io.OutputReader;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.RunJar;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 
@@ -991,7 +992,7 @@
     if (jar_ != null && isLocalHadoop()) {
       // getAbs became required when shell and subvm have different working dirs...
       File wd = new File(".").getAbsoluteFile();
-      StreamUtil.unJar(new File(jar_), wd);
+      RunJar.unJar(new File(jar_), wd);
     }
 
     // if jobConf_ changes must recreate a JobClient
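
The call shape of the replacement shown above, in isolation: org.apache.hadoop.util.RunJar.unJar(File, File) unpacks a jar into a directory, matching what the removed StreamUtil.unJar copy did. The jar path here is a placeholder.

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.util.RunJar;

    public class UnJarExample {
      public static void main(String[] args) throws IOException {
        File jar = new File("streamjob.jar");        // placeholder jar
        File wd = new File(".").getAbsoluteFile();   // as StreamJob does above
        RunJar.unJar(jar, wd);                       // replaces StreamUtil.unJar
      }
    }
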
diff --git a/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java b/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
index 88ef99b..281e1df 100644
--- a/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
+++ b/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
@@ -18,35 +18,23 @@
 
 package org.apache.hadoop.streaming;
 
-import java.text.DecimalFormat;
 import java.io.*;
 import java.net.*;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Enumeration;
-import java.util.Iterator;
-import java.util.List;
-import java.util.jar.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
-/** Utilities not available elsewhere in Hadoop.
- *  
+/** 
+ * Utilities used in streaming
  */
+@InterfaceAudience.Private
 public class StreamUtil {
 
-  private static final Log LOG = 
-    LogFactory.getLog(StreamUtil.class.getName());
-  
   /** It may seem strange to silently switch behaviour when a String
    * is not a classname; the reason is simplified Usage:<pre>
    * -mapper [classname | program ]
@@ -114,166 +102,6 @@
     return codePath;
   }
 
-  // copied from TaskRunner  
-  static void unJar(File jarFile, File toDir) throws IOException {
-    JarFile jar = new JarFile(jarFile);
-    try {
-      Enumeration entries = jar.entries();
-      while (entries.hasMoreElements()) {
-        JarEntry entry = (JarEntry) entries.nextElement();
-        if (!entry.isDirectory()) {
-          InputStream in = jar.getInputStream(entry);
-          try {
-            File file = new File(toDir, entry.getName());
-            boolean b = file.getParentFile().mkdirs();
-            if (!b) { LOG.warn("Ignoring failure of mkdirs"); }
-            OutputStream out = new FileOutputStream(file);
-            try {
-              byte[] buffer = new byte[8192];
-              int i;
-              while ((i = in.read(buffer)) != -1) {
-                out.write(buffer, 0, i);
-              }
-            } finally {
-              out.close();
-            }
-          } finally {
-            in.close();
-          }
-        }
-      }
-    } finally {
-      jar.close();
-    }
-  }
-
-  final static long KB = 1024L * 1;
-  final static long MB = 1024L * KB;
-  final static long GB = 1024L * MB;
-  final static long TB = 1024L * GB;
-  final static long PB = 1024L * TB;
-
-  static DecimalFormat dfm = new DecimalFormat("####.000");
-  static DecimalFormat ifm = new DecimalFormat("###,###,###,###,###");
-
-  public static String dfmt(double d) {
-    return dfm.format(d);
-  }
-
-  public static String ifmt(double d) {
-    return ifm.format(d);
-  }
-
-  public static String formatBytes(long numBytes) {
-    StringBuffer buf = new StringBuffer();
-    boolean bDetails = true;
-    double num = numBytes;
-
-    if (numBytes < KB) {
-      buf.append(numBytes).append(" B");
-      bDetails = false;
-    } else if (numBytes < MB) {
-      buf.append(dfmt(num / KB)).append(" KB");
-    } else if (numBytes < GB) {
-      buf.append(dfmt(num / MB)).append(" MB");
-    } else if (numBytes < TB) {
-      buf.append(dfmt(num / GB)).append(" GB");
-    } else if (numBytes < PB) {
-      buf.append(dfmt(num / TB)).append(" TB");
-    } else {
-      buf.append(dfmt(num / PB)).append(" PB");
-    }
-    if (bDetails) {
-      buf.append(" (").append(ifmt(numBytes)).append(" bytes)");
-    }
-    return buf.toString();
-  }
-
-  public static String formatBytes2(long numBytes) {
-    StringBuffer buf = new StringBuffer();
-    long u = 0;
-    if (numBytes >= TB) {
-      u = numBytes / TB;
-      numBytes -= u * TB;
-      buf.append(u).append(" TB ");
-    }
-    if (numBytes >= GB) {
-      u = numBytes / GB;
-      numBytes -= u * GB;
-      buf.append(u).append(" GB ");
-    }
-    if (numBytes >= MB) {
-      u = numBytes / MB;
-      numBytes -= u * MB;
-      buf.append(u).append(" MB ");
-    }
-    if (numBytes >= KB) {
-      u = numBytes / KB;
-      buf.append(u).append(" KB ");
-    }
-    buf.append(u).append(" B"); //even if zero
-    return buf.toString();
-  }
-
-  static Environment env;
-  static String HOST;
-
-  static {
-    try {
-      env = new Environment();
-      HOST = env.getHost();
-    } catch (IOException io) {
-      io.printStackTrace();
-    }
-  }
-
-  static class StreamConsumer extends Thread {
-
-    StreamConsumer(InputStream in, OutputStream out) {
-      this.bin = new LineNumberReader(new BufferedReader(new InputStreamReader(in)));
-      if (out != null) {
-        this.bout = new DataOutputStream(out);
-      }
-    }
-
-    public void run() {
-      try {
-        String line;
-        while ((line = bin.readLine()) != null) {
-          if (bout != null) {
-            bout.writeUTF(line); //writeChars
-            bout.writeChar('\n');
-          }
-        }
-        bout.flush();
-      } catch (IOException io) {
-      }
-    }
-
-    LineNumberReader bin;
-    DataOutputStream bout;
-  }
-
-  static void exec(String arg, PrintStream log) {
-    exec(new String[] { arg }, log);
-  }
-
-  static void exec(String[] args, PrintStream log) {
-    try {
-      log.println("Exec: start: " + Arrays.asList(args));
-      Process proc = Runtime.getRuntime().exec(args);
-      new StreamConsumer(proc.getErrorStream(), log).start();
-      new StreamConsumer(proc.getInputStream(), log).start();
-      int status = proc.waitFor();
-      //if status != 0
-      log.println("Exec: status=" + status + ": " + Arrays.asList(args));
-    } catch (InterruptedException in) {
-      in.printStackTrace();
-    } catch (IOException io) {
-      io.printStackTrace();
-    }
-  }
-
   static String qualifyHost(String url) {
     try {
       return qualifyHost(new URL(url)).toString();
@@ -308,15 +136,6 @@
     return buf.toString();
   }
 
-  public static String safeGetCanonicalPath(File f) {
-    try {
-      String s = f.getCanonicalPath();
-      return (s == null) ? f.toString() : s;
-    } catch (IOException io) {
-      return f.toString();
-    }
-  }
-
   static String slurp(File f) throws IOException {
     int len = (int) f.length();
     byte[] buf = new byte[len];
@@ -345,165 +164,31 @@
     return contents;
   }
 
-  public static String rjustify(String s, int width) {
-    if (s == null) s = "null";
-    if (width > s.length()) {
-      s = getSpace(width - s.length()) + s;
-    }
-    return s;
-  }
+  static private Environment env;
+  static String HOST;
 
-  public static String ljustify(String s, int width) {
-    if (s == null) s = "null";
-    if (width > s.length()) {
-      s = s + getSpace(width - s.length());
-    }
-    return s;
-  }
-
-  static char[] space;
   static {
-    space = new char[300];
-    Arrays.fill(space, '\u0020');
-  }
-
-  public static String getSpace(int len) {
-    if (len > space.length) {
-      space = new char[Math.max(len, 2 * space.length)];
-      Arrays.fill(space, '\u0020');
-    }
-    return new String(space, 0, len);
-  }
-
-  static private Environment env_;
-
-  static Environment env() {
-    if (env_ != null) {
-      return env_;
-    }
     try {
-      env_ = new Environment();
+      env = new Environment();
+      HOST = env.getHost();
     } catch (IOException io) {
       io.printStackTrace();
     }
-    return env_;
   }
 
-  public static String makeJavaCommand(Class main, String[] argv) {
-    ArrayList vargs = new ArrayList();
-    File javaHomeBin = new File(System.getProperty("java.home"), "bin");
-    File jvm = new File(javaHomeBin, "java");
-    vargs.add(jvm.toString());
-    // copy parent classpath
-    vargs.add("-classpath");
-    vargs.add("\"" + System.getProperty("java.class.path") + "\"");
-
-    // add heap-size limit
-    vargs.add("-Xmx" + Runtime.getRuntime().maxMemory());
-
-    // Add main class and its arguments
-    vargs.add(main.getName());
-    for (int i = 0; i < argv.length; i++) {
-      vargs.add(argv[i]);
+  static Environment env() {
+    if (env != null) {
+      return env;
     }
-    return collate(vargs, " ");
-  }
-
-  public static String collate(Object[] args, String sep) {
-    return collate(Arrays.asList(args), sep);
-  }
-
-  public static String collate(List args, String sep) {
-    StringBuffer buf = new StringBuffer();
-    Iterator it = args.iterator();
-    while (it.hasNext()) {
-      if (buf.length() > 0) {
-        buf.append(" ");
-      }
-      buf.append(it.next());
+    try {
+      env = new Environment();
+    } catch (IOException io) {
+      io.printStackTrace();
     }
-    return buf.toString();
-  }
-
-  // JobConf helpers
-
-  public static FileSplit getCurrentSplit(JobConf job) {
-    String path = job.get(MRJobConfig.MAP_INPUT_FILE);
-    if (path == null) {
-      return null;
-    }
-    Path p = new Path(path);
-    long start = Long.parseLong(job.get(MRJobConfig.MAP_INPUT_START));
-    long length = Long.parseLong(job.get(MRJobConfig.MAP_INPUT_PATH));
-    return new FileSplit(p, start, length, job);
-  }
-
-  static class TaskId {
-
-    boolean mapTask;
-    String jobid;
-    int taskid;
-    int execid;
+    return env;
   }
 
   public static boolean isLocalJobTracker(JobConf job) {
     return job.get(JTConfig.JT_IPC_ADDRESS, "local").equals("local");
   }
-
-  public static TaskId getTaskInfo(JobConf job) {
-    TaskId res = new TaskId();
-
-    String id = job.get(MRJobConfig.TASK_ATTEMPT_ID);
-    if (isLocalJobTracker(job)) {
-      // it uses difft naming 
-      res.mapTask = job.getBoolean(MRJobConfig.TASK_ISMAP, true);
-      res.jobid = "0";
-      res.taskid = 0;
-      res.execid = 0;
-    } else {
-      String[] e = id.split("_");
-      res.mapTask = e[3].equals("m");
-      res.jobid = e[1] + "_" + e[2];
-      res.taskid = Integer.parseInt(e[4]);
-      res.execid = Integer.parseInt(e[5]);
-    }
-    return res;
-  }
-
-  public static void touch(File file) throws IOException {
-    file = file.getAbsoluteFile();
-    FileOutputStream out = new FileOutputStream(file);
-    out.close();
-    if (!file.exists()) {
-      throw new IOException("touch failed: " + file);
-    }
-  }
-
-  public static boolean isCygwin() {
-    String OS = System.getProperty("os.name");
-    return (OS.indexOf("Windows") > -1);
-  }
-
-  public static String localizeBin(String path) {
-    if (isCygwin()) {
-      path = "C:/cygwin/" + path;
-    }
-    return path;
-  }
-  
-  /** @param name foo where &lt;junit>&lt;sysproperty key="foo" value="${foo}"/> 
-   * If foo is undefined then Ant sets the unevaluated value. 
-   * Take this into account when setting defaultVal. */
-  public static String getBoundAntProperty(String name, String defaultVal)
-  {
-    String val = System.getProperty(name);
-    if (val != null && val.indexOf("${") >= 0) {
-      val = null;
-    }
-    if (val == null) {
-      val = defaultVal;
-    }
-    return val;
-  }
-
 }
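
After this cleanup StreamUtil keeps only a handful of helpers: the class-or-program check, host qualification, slurp, the cached Environment/HOST, and isLocalJobTracker. A small sketch of the last one; with no jobtracker address configured the lookup falls back to "local", so under a default configuration this is expected to print true.

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.streaming.StreamUtil;

    public class LocalTrackerCheck {
      public static void main(String[] args) {
        JobConf conf = new JobConf();
        // setting JTConfig.JT_IPC_ADDRESS to a real host:port would make this false
        System.out.println(StreamUtil.isLocalJobTracker(conf));
      }
    }
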
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
index 7210a7f..df2422c 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
@@ -34,8 +34,8 @@
   protected File INPUT_FILE = new File("input.txt");
   protected File OUTPUT_DIR = new File("out");
   protected String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
-  protected String map = StreamUtil.makeJavaCommand(RawBytesMapApp.class, new String[]{"."});
-  protected String reduce = StreamUtil.makeJavaCommand(RawBytesReduceApp.class, new String[0]);
+  protected String map = UtilTest.makeJavaCommand(RawBytesMapApp.class, new String[]{"."});
+  protected String reduce = UtilTest.makeJavaCommand(RawBytesReduceApp.class, new String[0]);
   protected String outputExpect = "are\t3\nblue\t1\nbunnies\t1\npink\t1\nred\t1\nroses\t1\nviolets\t1\n";
   
   public TestRawBytesStreaming() throws IOException {
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
index e169c52..b27a8c6 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
@@ -36,7 +36,7 @@
   protected File OUTPUT_DIR = new File("stream_aggregate_out");
   protected String input = "roses are red\nviolets are blue\nbunnies are pink\n";
   // map parses input lines and generates count entries for each word.
-  protected String map = StreamUtil.makeJavaCommand(StreamAggregate.class, new String[]{".", "\\n"});
+  protected String map = UtilTest.makeJavaCommand(StreamAggregate.class, new String[]{".", "\\n"});
   // Use the aggregate combine, reducei to aggregate the counts
   protected String outputExpect = "are\t3\nblue\t1\nbunnies\t1\npink\t1\nred\t1\nroses\t1\nviolets\t1\n";
 
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
index f3d2f6a..14f0f96 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
@@ -41,7 +41,7 @@
   protected File OUTPUT_DIR = new File("out_for_data_protocol_test");
   protected String input = "roses.smell.good\nroses.look.good\nroses.need.care\nroses.attract.bees\nroses.are.red\nroses.are.not.blue\nbunnies.are.pink\nbunnies.run.fast\nbunnies.have.short.tail\nbunnies.have.long.ears\n";
   // map behaves like "/usr/bin/cat"; 
-  protected String map = StreamUtil.makeJavaCommand(TrApp.class, new String[]{".", "."});
+  protected String map = UtilTest.makeJavaCommand(TrApp.class, new String[]{".", "."});
   // reduce counts the number of values for each key
   protected String reduce = "org.apache.hadoop.streaming.ValueCountReduce";
   protected String outputExpect = "bunnies.are\t1\nbunnies.have\t2\nbunnies.run\t1\nroses.are\t2\nroses.attract\t1\nroses.look\t1\nroses.need\t1\nroses.smell\t1\n";
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
index 1ba0bbe..5da9e03 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
@@ -40,7 +40,7 @@
   protected File OUTPUT_DIR = new File("stream_reduce_none_out");
   protected String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
   // map parses input lines and generates count entries for each word.
-  protected String map = StreamUtil.makeJavaCommand(TrApp.class, new String[]{".", "\\n"});
+  protected String map = UtilTest.makeJavaCommand(TrApp.class, new String[]{".", "\\n"});
   protected String outputExpect = "roses\t\nare\t\nred\t\nviolets\t\nare\t\nblue\t\nbunnies\t\nare\t\npink\t\n";
 
   private StreamJob job;
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
index e465a86..809baaa 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
@@ -46,10 +46,10 @@
   protected String outDir;
   protected String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
   // map behaves like "/usr/bin/tr . \\n"; (split words into lines)
-  protected String map = StreamUtil.makeJavaCommand(TrApp.class, new String[]{".", "\\n"});
+  protected String map = UtilTest.makeJavaCommand(TrApp.class, new String[]{".", "\\n"});
   // reduce behave like /usr/bin/uniq. But also prepend lines with R.
   // command-line combiner does not have any effect any more.
-  protected String reduce = StreamUtil.makeJavaCommand(UniqApp.class, new String[]{"R"});
+  protected String reduce = UtilTest.makeJavaCommand(UniqApp.class, new String[]{"R"});
   protected String outputExpect = "Rare\t\nRblue\t\nRbunnies\t\nRpink\t\nRred\t\nRroses\t\nRviolets\t\n";
 
   protected ArrayList<String> args = new ArrayList<String>();
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java
index bb9eef9..5843269 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java
@@ -45,9 +45,9 @@
   private Path outputPath = new Path("output");
   private String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
   private String map =
-      StreamUtil.makeJavaCommand(TrApp.class, new String[] { ".", "\\n" });
+      UtilTest.makeJavaCommand(TrApp.class, new String[] { ".", "\\n" });
   private String reduce =
-      StreamUtil.makeJavaCommand(UniqApp.class, new String[] { "R" });
+      UtilTest.makeJavaCommand(UniqApp.class, new String[] { "R" });
 
   public void testStreaming()
       throws Exception {
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBackground.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBackground.java
index 5b24a2a..1a92283 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBackground.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBackground.java
@@ -18,15 +18,15 @@
 
 package org.apache.hadoop.streaming;
 
-import org.junit.Test;
-import org.junit.Before;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
-import java.io.*;
-import java.util.*;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import org.junit.Before;
+import org.junit.Test;
 
 /**
  * This class tests if hadoopStreaming background works fine. A DelayEchoApp
@@ -38,7 +38,7 @@
   protected File INPUT_FILE = new File(TEST_DIR, "input.txt");
   protected File OUTPUT_DIR = new File(TEST_DIR, "out");
 
-  protected String tenSecondsTask = StreamUtil.makeJavaCommand(
+  protected String tenSecondsTask = UtilTest.makeJavaCommand(
       DelayEchoApp.class, new String[] { "10" });
 
   public TestStreamingBackground() throws IOException {
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java
index 6b8ba04..858fc71 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java
@@ -56,9 +56,9 @@
     Arrays.asList("hey001","hey018");
   
   private static final String badMapper = 
-    StreamUtil.makeJavaCommand(BadApp.class, new String[]{});
+    UtilTest.makeJavaCommand(BadApp.class, new String[]{});
   private static final String badReducer = 
-    StreamUtil.makeJavaCommand(BadApp.class, new String[]{"true"});
+    UtilTest.makeJavaCommand(BadApp.class, new String[]{"true"});
   private static final int INPUTSIZE=100;
   
   public TestStreamingBadRecords() throws IOException
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java
index c4d0f03..3076d8e 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java
@@ -27,7 +27,7 @@
 
 public class TestStreamingCombiner extends TestStreaming {
 
-  protected String combine = StreamUtil.makeJavaCommand(
+  protected String combine = UtilTest.makeJavaCommand(
       UniqApp.class, new String[]{""});
   
   public TestStreamingCombiner() throws IOException {
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java
index 630df26..f234305 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java
@@ -40,8 +40,8 @@
   protected File INPUT_FILE = new File(TEST_DIR, "input.txt");
   protected File OUTPUT_DIR = new File(TEST_DIR, "out");
 
-  protected String failingTask = StreamUtil.makeJavaCommand(FailApp.class, new String[]{"true"});
-  protected String echoTask = StreamUtil.makeJavaCommand(FailApp.class, new String[]{"false"});
+  protected String failingTask = UtilTest.makeJavaCommand(FailApp.class, new String[]{"true"});
+  protected String echoTask = UtilTest.makeJavaCommand(FailApp.class, new String[]{"false"});
 
   public TestStreamingExitStatus() throws IOException {
     UtilTest utilTest = new UtilTest(getClass().getName());
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
index 969bdc6..f8167bb 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
@@ -42,11 +42,11 @@
   // mapreduce.input.keyvaluelinerecordreader.key.value.separator reads 1 as separator
   // stream.map.input.field.separator uses 2 as separator
   // map behaves like "/usr/bin/tr 2 3"; (translate 2 to 3)
-  protected String map = StreamUtil.makeJavaCommand(TrApp.class, new String[]{"2", "3"});
+  protected String map = UtilTest.makeJavaCommand(TrApp.class, new String[]{"2", "3"});
   // stream.map.output.field.separator recognize 3 as separator
   // stream.reduce.input.field.separator recognize 3 as separator
   // reduce behaves like "/usr/bin/tr 3 4"; (translate 3 to 4)
-  protected String reduce = StreamUtil.makeJavaCommand(TrAppReduce.class, new String[]{"3", "4"});
+  protected String reduce = UtilTest.makeJavaCommand(TrAppReduce.class, new String[]{"3", "4"});
   // stream.reduce.output.field.separator recognize 4 as separator
   // mapreduce.output.textoutputformat.separator outputs 5 as separator
   protected String outputExpect = "bunnies5are.pink\nroses5are.red\nviolets5are.blue\n";
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java
index 3ee2b39..d6987c2 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java
@@ -44,7 +44,7 @@
     return new String[] {
       "-input", input.getAbsolutePath(),
       "-output", output.getAbsolutePath(),
-      "-mapper", StreamUtil.makeJavaCommand(StderrApp.class,
+      "-mapper", UtilTest.makeJavaCommand(StderrApp.class,
                                             new String[]{Integer.toString(preLines),
                                                          Integer.toString(duringLines),
                                                          Integer.toString(postLines)}),
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
index 6e91687..a96709a 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
@@ -36,8 +36,8 @@
   protected File INPUT_FILE = new File("input.txt");
   protected File OUTPUT_DIR = new File("out");
   protected String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
-  protected String map = StreamUtil.makeJavaCommand(TypedBytesMapApp.class, new String[]{"."});
-  protected String reduce = StreamUtil.makeJavaCommand(TypedBytesReduceApp.class, new String[0]);
+  protected String map = UtilTest.makeJavaCommand(TypedBytesMapApp.class, new String[]{"."});
+  protected String reduce = UtilTest.makeJavaCommand(TypedBytesReduceApp.class, new String[0]);
   protected String outputExpect = "are\t3\nred\t1\nblue\t1\npink\t1\nroses\t1\nbunnies\t1\nviolets\t1\n";
   
   public TestTypedBytesStreaming() throws IOException {
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
index 8545079..068319c 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
@@ -78,7 +78,7 @@
    */
   @Test
   public void testCommandLine() {
-    if (StreamUtil.isCygwin()) {
+    if (UtilTest.isCygwin()) {
       return;
     }
     try {
@@ -89,7 +89,7 @@
       
       mr = new MiniMRCluster(numSlaves, fs.getUri().toString(), 1);
       writeInputFile(fs, inputPath);
-      map = StreamUtil.makeJavaCommand(UlimitApp.class, new String[]{});  
+      map = UtilTest.makeJavaCommand(UlimitApp.class, new String[]{});  
       runProgram(SET_MEMORY_LIMIT);
       fs.delete(outputPath, true);
       assertFalse("output not cleaned up", fs.exists(outputPath));
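
The streaming tests above now get makeJavaCommand from UtilTest, and the hunk below adds that helper (together with collate and isCygwin) to UtilTest. A hypothetical snippet, placed in the same test package, showing the kind of command line the tests feed to -mapper/-reducer:

    package org.apache.hadoop.streaming;

    public class MakeJavaCommandDemo {
      public static void main(String[] args) {
        // Roughly: <java.home>/bin/java -classpath "..." -Xmx<heap> org.apache.hadoop.streaming.TrApp . \n
        String map = UtilTest.makeJavaCommand(TrApp.class, new String[] { ".", "\\n" });
        System.out.println(map);
      }
    }
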
diff --git a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UtilTest.java b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UtilTest.java
index 1fb3274..73e1565 100644
--- a/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UtilTest.java
+++ b/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UtilTest.java
@@ -22,6 +22,9 @@
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -82,6 +85,43 @@
     }
   }
 
+  public static String collate(List<String> args, String sep) {
+    StringBuffer buf = new StringBuffer();
+    Iterator<String> it = args.iterator();
+    while (it.hasNext()) {
+      if (buf.length() > 0) {
+        buf.append(" ");
+      }
+      buf.append(it.next());
+    }
+    return buf.toString();
+  }
+
+  public static String makeJavaCommand(Class<?> main, String[] argv) {
+    ArrayList<String> vargs = new ArrayList<String>();
+    File javaHomeBin = new File(System.getProperty("java.home"), "bin");
+    File jvm = new File(javaHomeBin, "java");
+    vargs.add(jvm.toString());
+    // copy parent classpath
+    vargs.add("-classpath");
+    vargs.add("\"" + System.getProperty("java.class.path") + "\"");
+  
+    // add heap-size limit
+    vargs.add("-Xmx" + Runtime.getRuntime().maxMemory());
+  
+    // Add main class and its arguments
+    vargs.add(main.getName());
+    for (int i = 0; i < argv.length; i++) {
+      vargs.add(argv[i]);
+    }
+    return collate(vargs, " ");
+  }
+
+  public static boolean isCygwin() {
+    String OS = System.getProperty("os.name");
+    return (OS.indexOf("Windows") > -1);
+  }
+
   /**
    * Is perl supported on this machine ?
    * @return true if perl is available and is working as expected