<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!--
Forrest site.xml
This file contains an outline of the site's information content. It is used to:
- Generate the website menus (though these can be overridden; see the docs).
- Provide semantic, location-independent aliases for internal 'site:' URIs,
e.g. <link href="site:changes"> links to changes.html (or ../changes.html
from a subdirectory).
- Provide aliases for external URLs in the external-refs section, e.g.
<link href="ext:cocoon"> links to http://xml.apache.org/cocoon/.
See http://forrest.apache.org/docs/linking.html for more information.
-->
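<!--
Illustration (a sketch only, using entries defined in this file): in an
xdoc, <link href="site:overview"> resolves to index.html via the
<overview> entry under "Getting Started", while <link href="ext:lists">
resolves to the mailing list URL declared in <external-refs>. Nested
entries are addressed with '/', so <link href="ext:api/index"> points at
the Javadoc index page.
-->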
<site label="Hadoop" href="" xmlns="http://apache.org/forrest/linkmap/1.0">
<docs label="Getting Started">
<overview label="Overview" href="index.html" />
<mapred label="MapReduce Tutorial" href="mapred_tutorial.html" />
<streaming label="Hadoop Streaming" href="streaming.html" />
</docs>
<docs label="Guides">
<distcp label="DistCp" href="distcp.html" />
<vaidya label="Vaidya" href="vaidya.html"/>
<archives label="Hadoop Archives" href="hadoop_archives.html"/>
<gridmix label="Gridmix" href="gridmix.html"/>
<rumen label="Rumen" href="rumen.html"/>
</docs>
<docs label="Schedulers">
<cap_scheduler label="Capacity Scheduler" href="capacity_scheduler.html"/>
<fair_scheduler label="Fair Scheduler" href="fair_scheduler.html"/>
<dyn_scheduler label="Dynamic Priority Scheduler" href="dynamic_scheduler.html"/>
</docs>
<docs label="Miscellaneous">
<api label="API Docs" href="ext:api/index" />
<jdiff label="API Changes" href="ext:jdiff/changes" />
<wiki label="Wiki" href="ext:wiki" />
<faq label="FAQ" href="ext:faq" />
<relnotes label="Release Notes" href="ext:relnotes" />
<changes label="Change Log" href="ext:changes" />
</docs>
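<!--
External references resolve by concatenating hrefs down the element tree;
a child href beginning with '#' is appended to its parent's URL as a
fragment. For example (a sketch based on the entries below),
<link href="ext:releases/download"> resolves to
http://hadoop.apache.org/mapreduce/releases.html#Download.
-->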
<external-refs>
<site href="http://hadoop.apache.org/mapreduce/"/>
<lists href="http://hadoop.apache.org/mapreduce/mailing_lists.html"/>
<archive href="http://mail-archives.apache.org/mod_mbox/hadoop-mapreduce-commits/"/>
<releases href="http://hadoop.apache.org/mapreduce/releases.html">
<download href="#Download" />
</releases>
<jira href="http://hadoop.apache.org/mapreduce/issue_tracking.html"/>
<wiki href="http://wiki.apache.org/hadoop/MapReduce" />
<faq href="http://wiki.apache.org/hadoop/MapReduce/FAQ" />
<common-default href="http://hadoop.apache.org/common/docs/current/common-default.html" />
<hdfs-default href="http://hadoop.apache.org/hdfs/docs/current/hdfs-default.html" />
<mapred-default href="http://hadoop.apache.org/mapreduce/docs/current/mapred-default.html" />
<mapred-queues href="http://hadoop.apache.org/mapreduce/docs/current/mapred-queues.xml" />
<mapred-queues-capacity-scheduler href="http://hadoop.apache.org/mapreduce/docs/current/mapred-queues-capacity-scheduler.xml" />
<capacity-scheduler-conf href="http://hadoop.apache.org/mapreduce/docs/current/capacity-scheduler-conf.html" />
<single-node-setup href="http://hadoop.apache.org/common/docs/current/single_node_setup.html">
<PreReqs href="#PreReqs" />
<Download href="#Download" />
</single-node-setup>
<cluster-setup href="http://hadoop.apache.org/common/docs/current/cluster_setup.html">
<property_tag href="#property_tag" />
<RefreshingQueueConfiguration href="#Refreshing+queue+configuration"/>
<mapred-queues.xml href="#mapred-queues.xml"/>
<ConfiguringEnvironmentHadoopDaemons href="#Configuring+the+Environment+of+the+Hadoop+Daemons"/>
<ConfiguringMemoryParameters href="#Configuring+Memory+Parameters+for+MapReduce+Jobs" />
<ConfiguringHadoopDaemons href="#Configuring+the+Hadoop+Daemons"/>
<FullyDistributedOperation href="#Fully-Distributed+Operation"/>
</cluster-setup>
<commands-manual href="http://hadoop.apache.org/common/docs/current/commands_manual.html">
<RefreshQueues href="#RefreshQueues"/>
</commands-manual>
<zlib href="http://www.zlib.net/" />
<gzip href="http://www.gzip.org/" />
<bzip href="http://www.bzip.org/" />
<cygwin href="http://www.cygwin.com/" />
<osx href="http://www.apple.com/macosx" />
<hod href="">
<cluster-resources href="http://www.clusterresources.com" />
<torque href="http://www.clusterresources.com/pages/products/torque-resource-manager.php" />
<torque-download href="http://www.clusterresources.com/downloads/torque/" />
<torque-docs href="http://www.clusterresources.com/pages/resources/documentation.php" />
<torque-wiki href="http://www.clusterresources.com/wiki/doku.php?id=torque:torque_wiki" />
<torque-mailing-list href="http://www.clusterresources.com/pages/resources/mailing-lists.php" />
<torque-basic-config href="http://www.clusterresources.com/wiki/doku.php?id=torque:1.2_basic_configuration" />
<torque-advanced-config href="http://www.clusterresources.com/wiki/doku.php?id=torque:1.3_advanced_configuration" />
<maui href="http://www.clusterresources.com/pages/products/maui-cluster-scheduler.php"/>
<python href="http://www.python.org" />
<twisted-python href="http://twistedmatrix.com/trac/" />
</hod>
<relnotes href="releasenotes.html" />
<changes href="changes.html" />
<jdiff href="jdiff/">
<changes href="changes.html" />
</jdiff>
<api href="api/">
<index href="index.html" />
<org href="org/">
<apache href="apache/">
<hadoop href="hadoop/">
<conf href="conf/">
<configuration href="Configuration.html">
<final_parameters href="#FinalParams" />
<get href="#get(java.lang.String, java.lang.String)" />
<set href="#set(java.lang.String, java.lang.String)" />
</configuration>
</conf>
<filecache href="filecache/">
<distributedcache href="DistributedCache.html">
<addarchivetoclasspath href="#addArchiveToClassPath(org.apache.hadoop.fs.Path,%20org.apache.hadoop.conf.Configuration)" />
<addfiletoclasspath href="#addFileToClassPath(org.apache.hadoop.fs.Path,%20org.apache.hadoop.conf.Configuration)" />
<addcachefile href="#addCacheFile(java.net.URI,%20org.apache.hadoop.conf.Configuration)" />
<addcachearchive href="#addCacheArchive(java.net.URI,%20org.apache.hadoop.conf.Configuration)" />
<setcachefiles href="#setCacheFiles(java.net.URI[],%20org.apache.hadoop.conf.Configuration)" />
<setcachearchives href="#setCacheArchives(java.net.URI[],%20org.apache.hadoop.conf.Configuration)" />
<createsymlink href="#createSymlink(org.apache.hadoop.conf.Configuration)" />
</distributedcache>
</filecache>
<fs href="fs/">
<filesystem href="FileSystem.html" />
</fs>
<io href="io/">
<closeable href="Closeable.html">
<close href="#close()" />
</closeable>
<sequencefile href="SequenceFile.html" />
<sequencefilecompressiontype href="SequenceFile.CompressionType.html">
<none href="#NONE" />
<record href="#RECORD" />
<block href="#BLOCK" />
</sequencefilecompressiontype>
<writable href="Writable.html" />
<writablecomparable href="WritableComparable.html" />
<compress href="compress/">
<compressioncodec href="CompressionCodec.html" />
</compress>
</io>
<mapred href="mapred/">
<clusterstatus href="ClusterStatus.html" />
<counters href="Counters.html" />
<fileinputformat href="FileInputFormat.html">
<setinputpaths href="#setInputPaths(org.apache.hadoop.mapred.JobConf,%20org.apache.hadoop.fs.Path[])" />
<addinputpath href="#addInputPath(org.apache.hadoop.mapred.JobConf,%20org.apache.hadoop.fs.Path)" />
<setinputpathstring href="#setInputPaths(org.apache.hadoop.mapred.JobConf,%20java.lang.String)" />
<addinputpathstring href="#addInputPath(org.apache.hadoop.mapred.JobConf,%20java.lang.String)" />
</fileinputformat>
<fileoutputformat href="FileOutputFormat.html">
<getoutputpath href="#getOutputPath(org.apache.hadoop.mapred.JobConf)" />
<getworkoutputpath href="#getWorkOutputPath(org.apache.hadoop.mapred.JobConf)" />
<setoutputpath href="#setOutputPath(org.apache.hadoop.mapred.JobConf,%20org.apache.hadoop.fs.Path)" />
<setcompressoutput href="#setCompressOutput(org.apache.hadoop.mapred.JobConf,%20boolean)" />
<setoutputcompressorclass href="#setOutputCompressorClass(org.apache.hadoop.mapred.JobConf,%20java.lang.Class)" />
</fileoutputformat>
<filesplit href="FileSplit.html" />
<inputformat href="InputFormat.html" />
<inputsplit href="InputSplit.html" />
<jobclient href="JobClient.html">
<runjob href="#runJob(org.apache.hadoop.mapred.JobConf)" />
<submitjob href="#submitJob(org.apache.hadoop.mapred.JobConf)" />
<getdelegationtoken href="#getDelegationToken(org.apache.hadoop.io.Text)" />
</jobclient>
<jobconf href="JobConf.html">
<setnummaptasks href="#setNumMapTasks(int)" />
<setnumreducetasks href="#setNumReduceTasks(int)" />
<setoutputkeycomparatorclass href="#setOutputKeyComparatorClass(java.lang.Class)" />
<setoutputvaluegroupingcomparator href="#setOutputValueGroupingComparator(java.lang.Class)" />
<setcombinerclass href="#setCombinerClass(java.lang.Class)" />
<setmapdebugscript href="#setMapDebugScript(java.lang.String)" />
<setreducedebugscript href="#setReduceDebugScript(java.lang.String)" />
<setmapspeculativeexecution href="#setMapSpeculativeExecution(boolean)" />
<setreducespeculativeexecution href="#setReduceSpeculativeExecution(boolean)" />
<setmaxmapattempts href="#setMaxMapAttempts(int)" />
<setmaxreduceattempts href="#setMaxReduceAttempts(int)" />
<setmaxmaptaskfailurespercent href="#setMaxMapTaskFailuresPercent(int)" />
<setmaxreducetaskfailurespercent href="#setMaxReduceTaskFailuresPercent(int)" />
<setjobendnotificationuri href="#setJobEndNotificationURI(java.lang.String)" />
<setcompressmapoutput href="#setCompressMapOutput(boolean)" />
<setmapoutputcompressorclass href="#setMapOutputCompressorClass(java.lang.Class)" />
<setprofileenabled href="#setProfileEnabled(boolean)" />
<setprofiletaskrange href="#setProfileTaskRange(boolean,%20java.lang.String)" />
<setprofileparams href="#setProfileParams(java.lang.String)" />
<setnumtaskstoexecuteperjvm href="#setNumTasksToExecutePerJvm(int)" />
<setqueuename href="#setQueueName(java.lang.String)" />
<getjoblocaldir href="#getJobLocalDir()" />
<getjar href="#getJar()" />
<getcredentials href="#getCredentials()" />
</jobconf>
<jobconfigurable href="JobConfigurable.html">
<configure href="#configure(org.apache.hadoop.mapred.JobConf)" />
</jobconfigurable>
<jobcontrol href="jobcontrol/">
<package-summary href="package-summary.html" />
</jobcontrol>
<mapper href="Mapper.html">
<map href="#map(K1, V1, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter)" />
</mapper>
<outputcollector href="OutputCollector.html">
<collect href="#collect(K, V)" />
</outputcollector>
<outputcommitter href="OutputCommitter.html" />
<outputformat href="OutputFormat.html" />
<outputlogfilter href="OutputLogFilter.html" />
<sequencefileoutputformat href="SequenceFileOutputFormat.html">
<setoutputcompressiontype href="#setOutputCompressionType(org.apache.hadoop.mapred.JobConf,%20org.apache.hadoop.io.SequenceFile.CompressionType)" />
</sequencefileoutputformat>
<partitioner href="Partitioner.html" />
<recordreader href="RecordReader.html" />
<recordwriter href="RecordWriter.html" />
<reducer href="Reducer.html">
<reduce href="#reduce(K2, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter)" />
</reducer>
<reporter href="Reporter.html">
<incrcounterEnum href="#incrCounter(java.lang.Enum, long)" />
<incrcounterString href="#incrCounter(java.lang.String, java.lang.String, long)" />
</reporter>
<runningjob href="RunningJob.html" />
<skipbadrecords href="SkipBadRecords.html">
<setmappermaxskiprecords href="#setMapperMaxSkipRecords(org.apache.hadoop.conf.Configuration, long)"/>
<setreducermaxskipgroups href="#setReducerMaxSkipGroups(org.apache.hadoop.conf.Configuration, long)"/>
<setattemptstostartskipping href="#setAttemptsToStartSkipping(org.apache.hadoop.conf.Configuration, int)"/>
<setskipoutputpath href="#setSkipOutputPath(org.apache.hadoop.mapred.JobConf, org.apache.hadoop.fs.Path)"/>
<counter_map_processed_records href="#COUNTER_MAP_PROCESSED_RECORDS"/>
<counter_reduce_processed_groups href="#COUNTER_REDUCE_PROCESSED_GROUPS"/>
</skipbadrecords>
<textinputformat href="TextInputFormat.html" />
<textoutputformat href="TextOutputFormat.html" />
<lib href="lib/">
<package-summary href="package-summary.html" />
<hashpartitioner href="HashPartitioner.html" />
<keyfieldbasedpartitioner href="KeyFieldBasedPartitioner.html" />
<keyfieldbasedcomparator href="KeyFieldBasedComparator.html" />
<lazyoutputformat href="LazyOutputFormat.html" />
<aggregate href="aggregate/">
<package-summary href="package-summary.html" />
</aggregate>
</lib>
<pipes href="pipes/">
<package-summary href="package-summary.html" />
</pipes>
</mapred>
<mapreduce href="mapreduce/">
<counter href="Counter.html">
<increment href="#increment(long)"/>
</counter>
<counters href="Counters.html" />
<inputformat href="InputFormat.html">
<getsplits href="#getSplits(org.apache.hadoop.mapreduce.JobContext)"/>
</inputformat>
<inputsplit href="InputSplit.html" />
<job href="Job.html">
<setcombinerclass href="#setCombinerClass(java.lang.Class)"/>
<setgroupingcomparatorclass href="#setGroupingComparatorClass(java.lang.Class)"/>
<setmapperclass href="#setMapperClass(java.lang.Class)"/>
<setmapspeculativeexecution href="#setMapSpeculativeExecution(boolean)"/>
<setmaxmapattempts href="#setMaxMapAttempts(int)"/>
<setmaxreduceattempts href="#setMaxReduceAttempts(int)"/>
<setnummaptasks href="#setNumMapTasks(int)"/>
<setnumreducetasks href="#setNumReduceTasks(int)"/>
<setprofileenabled href="#setProfileEnabled(boolean)"/>
<setprofileparams href="#setProfileParams(java.lang.String)"/>
<setprofiletaskrange href="#setProfileTaskRange(boolean,%20java.lang.String)"/>
<setreducerclass href="#setReducerClass(java.lang.Class)"/>
<setreducespeculativeexecution href="#setReduceSpeculativeExecution(boolean)"/>
<submit href="#submit()"/>
<waitforcompletion href="#waitForCompletion(boolean)"/>
</job>
<jobcontext href="JobContext.html">
<getcredentials href="#getCredentials()" />
</jobcontext>
<lib href="lib/">
<package-summary href="package-summary.html" />
<input href="input/">
<fileinputformat href="FileInputFormat.html">
<setinputpaths
href="#setInputPaths(org.apache.hadoop.mapreduce.Job,%20org.apache.hadoop.fs.Path...)"/>
<addinputpath
href="#addInputPath(org.apache.hadoop.mapreduce.Job,%20org.apache.hadoop.fs.Path)"/>
<setinputpathstring
href="#setInputPaths(org.apache.hadoop.mapreduce.Job,%20java.lang.String)"/>
<addinputpathstring
href="#addInputPaths(org.apache.hadoop.mapreduce.Job,%20java.lang.String)"/>
</fileinputformat>
<filesplit href="FileSplit.html" />
<textinputformat href="TextInputFormat.html" />
</input>
<output href="output/">
<fileoutputcommitter href="FileOutputCommitter.html"/>
<fileoutputformat href="FileOutputFormat.html">
<getoutputpath href="#getOutputPath(org.apache.hadoop.mapreduce.JobContext)" />
<getworkoutputpath
href="#getWorkOutputPath(org.apache.hadoop.mapreduce.TaskInputOutputContext)"/>
<setoutputpath href="#setOutputPath(org.apache.hadoop.mapreduce.Job,%20org.apache.hadoop.fs.Path)" />
<setcompressoutput href="#setCompressOutput(org.apache.hadoop.mapreduce.Job,%20boolean)" />
<setoutputcompressorclass
href="#setOutputCompressorClass(org.apache.hadoop.mapreduce.Job,%20java.lang.Class)" />
</fileoutputformat>
<lazyoutputformat href="LazyOutputFormat.html">
<setoutputformatclass
href="#setOutputFormatClass(org.apache.hadoop.mapreduce.Job,%20java.lang.Class)"/>
</lazyoutputformat>
<sequencefileoutputformat href="SequenceFileOutputFormat.html">
<setoutputcompressiontype
href="#setOutputCompressionType(org.apache.hadoop.mapreduce.Job,%20org.apache.hadoop.io.SequenceFile.CompressionType)"/>
</sequencefileoutputformat>
<textoutputformat href="TextOutputFormat.html" />
</output>
<partition href="partition/">
<hashpartitioner href="HashPartitioner.html" />
</partition>
</lib>
<mapcontext href="MapContext.html"/>
<mapper href="Mapper.html">
<cleanup href="#cleanup(org.apache.hadoop.mapreduce.Mapper.Context)"/>
<map href="#map(KEYIN,%20VALUEIN,%20org.apache.hadoop.mapreduce.Mapper.Context)"/>
<run href="#run(org.apache.hadoop.mapreduce.Mapper.Context)"/>
<setup href="#setup(org.apache.hadoop.mapreduce.Mapper.Context)"/>
</mapper>
<outputcollector href="OutputCollector.html">
<collect href="#collect(K, V)" />
</outputcollector>
<outputcommitter href="OutputCommitter.html" />
<outputformat href="OutputFormat.html" />
<partitioner href="Partitioner.html" />
<recordreader href="RecordReader.html" />
<recordwriter href="RecordWriter.html" />
<reducecontext href="ReduceContext.html"/>
<reducer href="Reducer.html">
<cleanup href="#cleanup(org.apache.hadoop.mapreduce.Reducer.Context)"/>
<reduce
href="#reduce(KEYIN,%20java.lang.Iterable,%20org.apache.hadoop.mapreduce.Reducer.Context)"/>
<run href="#run(org.apache.hadoop.mapreduce.Reducer.Context)"/>
<setup href="#setup(org.apache.hadoop.mapreduce.Reducer.Context)"/>
</reducer>
<task href="task/">
<jobcontextimpl href="JobContextImpl.html">
<getjar href="#getJar()"/>
</jobcontextimpl>
</task>
<taskinputoutputcontext href="TaskInputOutputContext.html">
<write href="#write(KEYOUT,%20VALUEOUT)"/>
<getcounter href="#getCounter(java.lang.Enum)"/>
</taskinputoutputcontext>
</mapreduce>
<net href="net/">
<dnstoswitchmapping href="DNSToSwitchMapping.html">
<resolve href="#resolve(java.util.List)" />
</dnstoswitchmapping>
</net>
<streaming href="streaming/">
<package-summary href="package-summary.html" />
</streaming>
<typedbytes href="typedbytes/">
<package-summary href="package-summary.html" />
</typedbytes>
<util href="util/">
<genericoptionsparser href="GenericOptionsParser.html" />
<progress href="Progress.html" />
<tool href="Tool.html" />
<toolrunner href="ToolRunner.html">
<run href="#run(org.apache.hadoop.util.Tool, java.lang.String[])" />
</toolrunner>
</util>
</hadoop>
</apache>
</org>
</api>
</external-refs>
</site>