/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.streaming;

import junit.framework.TestCase;
import java.io.*;
import java.util.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * This class tests hadoopStreaming in MapReduce local mode.
 * It tests the case where the number of reducers is set to 0.
 * In this case, the mappers are expected to write their output directly;
 * no reducer/combiner will be activated.
 */
public class TestStreamReduceNone extends TestCase
{
  protected File INPUT_FILE = new File("stream_reduce_none_input.txt");
  protected File OUTPUT_DIR = new File("stream_reduce_none_out");
  protected String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
  // The map command (TrApp) translates '.' to '\n', so each word of the input
  // becomes its own output record.
  protected String map = StreamUtil.makeJavaCommand(TrApp.class, new String[]{".", "\\n"});
  protected String outputExpect = "roses\t\nare\t\nred\t\nviolets\t\nare\t\nblue\t\nbunnies\t\nare\t\npink\t\n";
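
  // Sketch of the expected end-to-end behaviour (my reading of the test, not
  // quoted from the streaming docs): "roses.are.red" is split by the mapper
  // into the records "roses", "are" and "red"; because there are zero reduce
  // tasks, each record is written straight to the part file as a key with an
  // empty value, which is why every word in outputExpect above is followed by
  // a tab.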

  private StreamJob job;

  public TestStreamReduceNone() throws IOException
  {
    UtilTest utilTest = new UtilTest(getClass().getName());
    utilTest.checkUserDir();
    utilTest.redirectIfAntJunit();
  }

  protected void createInput() throws IOException
  {
    DataOutputStream out = new DataOutputStream(
      new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
    out.write(input.getBytes("UTF-8"));
    out.close();
  }
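
  /*
   * Rough command-line equivalent of the job that genArgs() below assembles
   * (a sketch for orientation only; the streaming jar name and the quoted
   * mapper command are placeholders, not values taken from this test):
   *
   *   hadoop jar hadoop-streaming.jar \
   *     -input stream_reduce_none_input.txt \
   *     -output stream_reduce_none_out \
   *     -mapper '<TrApp command built by StreamUtil.makeJavaCommand>' \
   *     -numReduceTasks 0
   */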
  protected String[] genArgs() {
    return new String[] {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      // a reducer is still named here, but with zero reduce tasks it should never run
      "-reducer", "org.apache.hadoop.mapred.lib.IdentityReducer",
      "-numReduceTasks", "0",
      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
      "-jobconf", "stream.tmpdir=" + System.getProperty("test.build.data", "/tmp")
    };
  }

  public void testCommandLine()
  {
    String outFileName = "part-00000";
    File outFile = null;
    try {
      try {
        OUTPUT_DIR.getAbsoluteFile().delete();
      } catch (Exception e) {
        // ignore: it is fine if the output directory does not exist yet
      }
      createInput();
      boolean mayExit = false;
      // During tests, the default Configuration will use a local mapred,
      // so don't specify -config or -cluster.
      job = new StreamJob(genArgs(), mayExit);
      job.go();
      outFile = new File(OUTPUT_DIR, outFileName).getAbsoluteFile();
      String output = StreamUtil.slurp(outFile);
      System.err.println("outEx1=" + outputExpect);
      System.err.println(" out1=" + output);
      assertEquals(outputExpect, output);
    } catch (Exception e) {
      failTrace(e);
    } finally {
      // Guard against an NPE masking the real failure if the job never produced output.
      if (outFile != null) {
        outFile.delete();
      }
      File outFileCRC = new File(OUTPUT_DIR, "." + outFileName + ".crc").getAbsoluteFile();
      INPUT_FILE.delete();
      outFileCRC.delete();
      OUTPUT_DIR.getAbsoluteFile().delete();
    }
  }

  private void failTrace(Exception e)
  {
    StringWriter sw = new StringWriter();
    e.printStackTrace(new PrintWriter(sw));
    fail(sw.toString());
  }

  public static void main(String[] args) throws Exception
  {
    new TestStreamReduceNone().testCommandLine();
  }
}