/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.pig.backend.hadoop.executionengine.shims;

import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.ContextFactory;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
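
/**
 * Shim layer wrapping the Hadoop mapreduce classes whose APIs differ between
 * Hadoop versions, so the rest of Pig can create job and task attempt
 * contexts, build task attempt ids, and check for FileSystem implementations
 * without depending on a specific Hadoop release.
 */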
public class HadoopShims {
private static Log LOG = LogFactory.getLog(HadoopShims.class);
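    /**
     * Clones the given JobContext, backing the copy with a new JobConf built
     * from the original context's configuration.
     */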
static public JobContext cloneJobContext(JobContext original) throws IOException, InterruptedException {
JobContext newContext = ContextFactory.cloneContext(original,
new JobConf(original.getConfiguration()));
return newContext;
}
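    /**
     * Creates a TaskAttemptContext for the given configuration and task
     * attempt id, copying the configuration into a new JobConf when the
     * caller passed one so JobConf-specific settings are preserved.
     */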
static public TaskAttemptContext createTaskAttemptContext(Configuration conf,
TaskAttemptID taskId) {
if (conf instanceof JobConf) {
return new TaskAttemptContextImpl(new JobConf(conf), taskId);
} else {
return new TaskAttemptContextImpl(conf, taskId);
}
}
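    /**
     * Creates a JobContext for the given configuration and job id, copying
     * the configuration into a new JobConf when the caller passed one.
     */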
static public JobContext createJobContext(Configuration conf,
JobID jobId) {
if (conf instanceof JobConf) {
return new JobContextImpl(new JobConf(conf), jobId);
} else {
return new JobContextImpl(conf, jobId);
}
}
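    /**
     * Returns true if the given task attempt id belongs to a map task.
     */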
    static public boolean isMap(TaskAttemptID taskAttemptID) {
        return taskAttemptID.getTaskType() == TaskType.MAP;
    }
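    /**
     * Returns a placeholder TaskAttemptID built from fixed, hard-coded
     * identifiers.
     */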
static public TaskAttemptID getNewTaskAttemptID() {
TaskAttemptID taskAttemptID = new TaskAttemptID("", 1, TaskType.MAP,
1, 1);
return taskAttemptID;
}
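    /**
     * Builds a TaskAttemptID for either a map or a reduce task from its
     * numeric components.
     */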
static public TaskAttemptID createTaskAttemptID(String jtIdentifier, int jobId, boolean isMap,
int taskId, int id) {
if (isMap) {
return new TaskAttemptID(jtIdentifier, jobId, TaskType.MAP, taskId, id);
} else {
return new TaskAttemptID(jtIdentifier, jobId, TaskType.REDUCE, taskId, id);
}
}
    /**
     * Returns whether the given path has a FileSystem implementation.
     *
     * @param path path
     * @param conf configuration
     * @return true if the given path's scheme has a FileSystem implementation,
     *         false otherwise
     */
public static boolean hasFileSystemImpl(Path path, Configuration conf) {
String scheme = path.toUri().getScheme();
if (scheme != null) {
// Hadoop 0.23
if (conf.get("fs.file.impl") != null) {
String fsImpl = conf.get("fs." + scheme + ".impl");
if (fsImpl == null) {
return false;
}
            } else {
                // No explicit fs.<scheme>.impl entries: ask FileSystem for the
                // implementation class registered for the scheme
                try {
                    Object fs = FileSystem.getFileSystemClass(scheme, conf);
                    return fs != null;
                } catch (Exception e) {
                    return false;
                }
}
}
return true;
}
}