| <!DOCTYPE HTML> |
| <html lang="en"> |
| <head> |
| <!-- Generated by javadoc (17) --> |
| <title>Source code</title> |
| <meta name="viewport" content="width=device-width, initial-scale=1"> |
| <meta name="description" content="source: package: org.apache.hadoop.hbase.mapreduce, class: SampleUploader, class: Uploader"> |
| <meta name="generator" content="javadoc/SourceToHTMLConverter"> |
| <link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> |
| </head> |
| <body class="source-page"> |
| <main role="main"> |
| <div class="source-container"> |
| <pre><span class="source-line-no">001</span><span id="line-1">/*</span> |
| <span class="source-line-no">002</span><span id="line-2"> * Licensed to the Apache Software Foundation (ASF) under one</span> |
| <span class="source-line-no">003</span><span id="line-3"> * or more contributor license agreements. See the NOTICE file</span> |
| <span class="source-line-no">004</span><span id="line-4"> * distributed with this work for additional information</span> |
| <span class="source-line-no">005</span><span id="line-5"> * regarding copyright ownership. The ASF licenses this file</span> |
| <span class="source-line-no">006</span><span id="line-6"> * to you under the Apache License, Version 2.0 (the</span> |
| <span class="source-line-no">007</span><span id="line-7"> * "License"); you may not use this file except in compliance</span> |
| <span class="source-line-no">008</span><span id="line-8"> * with the License. You may obtain a copy of the License at</span> |
| <span class="source-line-no">009</span><span id="line-9"> *</span> |
| <span class="source-line-no">010</span><span id="line-10"> * http://www.apache.org/licenses/LICENSE-2.0</span> |
| <span class="source-line-no">011</span><span id="line-11"> *</span> |
| <span class="source-line-no">012</span><span id="line-12"> * Unless required by applicable law or agreed to in writing, software</span> |
| <span class="source-line-no">013</span><span id="line-13"> * distributed under the License is distributed on an "AS IS" BASIS,</span> |
| <span class="source-line-no">014</span><span id="line-14"> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</span> |
| <span class="source-line-no">015</span><span id="line-15"> * See the License for the specific language governing permissions and</span> |
| <span class="source-line-no">016</span><span id="line-16"> * limitations under the License.</span> |
| <span class="source-line-no">017</span><span id="line-17"> */</span> |
| <span class="source-line-no">018</span><span id="line-18">package org.apache.hadoop.hbase.mapreduce;</span> |
| <span class="source-line-no">019</span><span id="line-19"></span> |
| <span class="source-line-no">020</span><span id="line-20">import java.io.IOException;</span> |
| <span class="source-line-no">021</span><span id="line-21">import java.util.Iterator;</span> |
| <span class="source-line-no">022</span><span id="line-22">import java.util.List;</span> |
| <span class="source-line-no">023</span><span id="line-23">import org.apache.hadoop.conf.Configuration;</span> |
| <span class="source-line-no">024</span><span id="line-24">import org.apache.hadoop.conf.Configured;</span> |
| <span class="source-line-no">025</span><span id="line-25">import org.apache.hadoop.fs.Path;</span> |
| <span class="source-line-no">026</span><span id="line-26">import org.apache.hadoop.hbase.HBaseConfiguration;</span> |
| <span class="source-line-no">027</span><span id="line-27">import org.apache.hadoop.hbase.client.Put;</span> |
| <span class="source-line-no">028</span><span id="line-28">import org.apache.hadoop.hbase.io.ImmutableBytesWritable;</span> |
| <span class="source-line-no">029</span><span id="line-29">import org.apache.hadoop.hbase.util.Bytes;</span> |
| <span class="source-line-no">030</span><span id="line-30">import org.apache.hadoop.io.LongWritable;</span> |
| <span class="source-line-no">031</span><span id="line-31">import org.apache.hadoop.io.Text;</span> |
| <span class="source-line-no">032</span><span id="line-32">import org.apache.hadoop.mapreduce.Job;</span> |
| <span class="source-line-no">033</span><span id="line-33">import org.apache.hadoop.mapreduce.Mapper;</span> |
| <span class="source-line-no">034</span><span id="line-34">import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;</span> |
| <span class="source-line-no">035</span><span id="line-35">import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;</span> |
| <span class="source-line-no">036</span><span id="line-36">import org.apache.hadoop.util.Tool;</span> |
| <span class="source-line-no">037</span><span id="line-37">import org.apache.hadoop.util.ToolRunner;</span> |
| <span class="source-line-no">038</span><span id="line-38">import org.apache.yetus.audience.InterfaceAudience;</span> |
| <span class="source-line-no">039</span><span id="line-39">import org.slf4j.Logger;</span> |
| <span class="source-line-no">040</span><span id="line-40">import org.slf4j.LoggerFactory;</span> |
| <span class="source-line-no">041</span><span id="line-41"></span> |
| <span class="source-line-no">042</span><span id="line-42">import org.apache.hbase.thirdparty.com.google.common.base.Splitter;</span> |
| <span class="source-line-no">043</span><span id="line-43"></span> |
| <span class="source-line-no">044</span><span id="line-44">/**</span> |
| <span class="source-line-no">045</span><span id="line-45"> * Sample Uploader MapReduce</span> |
| <span class="source-line-no">046</span><span id="line-46"> * <p></span> |
| <span class="source-line-no">047</span><span id="line-47"> * This is EXAMPLE code. You will need to change it to work for your context.</span> |
| <span class="source-line-no">048</span><span id="line-48"> * <p></span> |
| <span class="source-line-no">049</span><span id="line-49"> * Uses {@link TableReducer} to put the data into HBase. Change the InputFormat to suit your data.</span> |
| <span class="source-line-no">050</span><span id="line-50"> * In this example, we are importing a CSV file.</span> |
| <span class="source-line-no">051</span><span id="line-51"> * <p></span> |
| <span class="source-line-no">052</span><span id="line-52"> *</span> |
| <span class="source-line-no">053</span><span id="line-53"> * <pre></span> |
| <span class="source-line-no">054</span><span id="line-54"> * row,family,qualifier,value</span> |
| <span class="source-line-no">055</span><span id="line-55"> * </pre></span> |
| <span class="source-line-no">056</span><span id="line-56"> * <p></span> |
| <span class="source-line-no">057</span><span id="line-57"> * The table and columnfamily we're to insert into must preexist.</span> |
| <span class="source-line-no">058</span><span id="line-58"> * <p></span> |
| <span class="source-line-no">059</span><span id="line-59"> * There is no reducer in this example as it is not necessary and adds significant overhead. If you</span> |
| <span class="source-line-no">060</span><span id="line-60"> * need to do any massaging of data before inserting into HBase, you can do this in the map as well.</span> |
| <span class="source-line-no">061</span><span id="line-61"> * <p></span> |
| <span class="source-line-no">062</span><span id="line-62"> * Do the following to start the MR job:</span> |
| <span class="source-line-no">063</span><span id="line-63"> *</span> |
| <span class="source-line-no">064</span><span id="line-64"> * <pre></span> |
| <span class="source-line-no">065</span><span id="line-65"> * ./bin/hadoop org.apache.hadoop.hbase.mapreduce.SampleUploader /tmp/input.csv TABLE_NAME</span> |
| <span class="source-line-no">066</span><span id="line-66"> * </pre></span> |
| <span class="source-line-no">067</span><span id="line-67"> * <p></span> |
| <span class="source-line-no">068</span><span id="line-68"> * This code was written against HBase 0.21 trunk.</span> |
| <span class="source-line-no">069</span><span id="line-69"> */</span> |
| <span class="source-line-no">070</span><span id="line-70">@InterfaceAudience.Private</span> |
| <span class="source-line-no">071</span><span id="line-71">public class SampleUploader extends Configured implements Tool {</span> |
| <span class="source-line-no">072</span><span id="line-72"> private static final Logger LOG = LoggerFactory.getLogger(SampleUploader.class);</span> |
| <span class="source-line-no">073</span><span id="line-73"></span> |
| <span class="source-line-no">074</span><span id="line-74"> private static final String NAME = "SampleUploader";</span> |
| <span class="source-line-no">075</span><span id="line-75"></span> |
| <span class="source-line-no">076</span><span id="line-76"> static class Uploader extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {</span> |
| <span class="source-line-no">077</span><span id="line-77"> private long checkpoint = 100;</span> |
| <span class="source-line-no">078</span><span id="line-78"> private long count = 0;</span> |
| <span class="source-line-no">079</span><span id="line-79"></span> |
| <span class="source-line-no">080</span><span id="line-80"> @Override</span> |
| <span class="source-line-no">081</span><span id="line-81"> public void map(LongWritable key, Text line, Context context) throws IOException {</span> |
| <span class="source-line-no">082</span><span id="line-82"> // Input is a CSV file</span> |
| <span class="source-line-no">083</span><span id="line-83"> // Each map() is a single line, where the key is the line number</span> |
| <span class="source-line-no">084</span><span id="line-84"> // Each line is comma-delimited; row,family,qualifier,value</span> |
| <span class="source-line-no">085</span><span id="line-85"></span> |
| <span class="source-line-no">086</span><span id="line-86"> // Split CSV line</span> |
| <span class="source-line-no">087</span><span id="line-87"> List<String> values = Splitter.on(',').splitToList(line.toString());</span> |
| <span class="source-line-no">088</span><span id="line-88"> if (values.size() != 4) {</span> |
| <span class="source-line-no">089</span><span id="line-89"> return;</span> |
| <span class="source-line-no">090</span><span id="line-90"> }</span> |
| <span class="source-line-no">091</span><span id="line-91"> Iterator<String> i = values.iterator();</span> |
| <span class="source-line-no">092</span><span id="line-92"> // Extract each value</span> |
| <span class="source-line-no">093</span><span id="line-93"> byte[] row = Bytes.toBytes(i.next());</span> |
| <span class="source-line-no">094</span><span id="line-94"> byte[] family = Bytes.toBytes(i.next());</span> |
| <span class="source-line-no">095</span><span id="line-95"> byte[] qualifier = Bytes.toBytes(i.next());</span> |
| <span class="source-line-no">096</span><span id="line-96"> byte[] value = Bytes.toBytes(i.next());</span> |
| <span class="source-line-no">097</span><span id="line-97"></span> |
| <span class="source-line-no">098</span><span id="line-98"> // Create Put</span> |
| <span class="source-line-no">099</span><span id="line-99"> Put put = new Put(row);</span> |
| <span class="source-line-no">100</span><span id="line-100"> put.addColumn(family, qualifier, value);</span> |
| <span class="source-line-no">101</span><span id="line-101"></span> |
| <span class="source-line-no">102</span><span id="line-102"> // Uncomment below to disable WAL. This will improve performance but means</span> |
| <span class="source-line-no">103</span><span id="line-103"> // you will experience data loss in the case of a RegionServer crash.</span> |
| <span class="source-line-no">104</span><span id="line-104"> // put.setWriteToWAL(false);</span> |
| <span class="source-line-no">105</span><span id="line-105"></span> |
| <span class="source-line-no">106</span><span id="line-106"> try {</span> |
| <span class="source-line-no">107</span><span id="line-107"> context.write(new ImmutableBytesWritable(row), put);</span> |
| <span class="source-line-no">108</span><span id="line-108"> } catch (InterruptedException e) {</span> |
| <span class="source-line-no">109</span><span id="line-109"> LOG.error("Interrupted emitting put", e);</span> |
| <span class="source-line-no">110</span><span id="line-110"> Thread.currentThread().interrupt();</span> |
| <span class="source-line-no">111</span><span id="line-111"> }</span> |
| <span class="source-line-no">112</span><span id="line-112"></span> |
| <span class="source-line-no">113</span><span id="line-113"> // Set status every checkpoint lines</span> |
| <span class="source-line-no">114</span><span id="line-114"> if (++count % checkpoint == 0) {</span> |
| <span class="source-line-no">115</span><span id="line-115"> context.setStatus("Emitting Put " + count);</span> |
| <span class="source-line-no">116</span><span id="line-116"> }</span> |
| <span class="source-line-no">117</span><span id="line-117"> }</span> |
| <span class="source-line-no">118</span><span id="line-118"> }</span> |
| <span class="source-line-no">119</span><span id="line-119"></span> |
| <span class="source-line-no">120</span><span id="line-120"> /**</span> |
| <span class="source-line-no">121</span><span id="line-121"> * Job configuration.</span> |
| <span class="source-line-no">122</span><span id="line-122"> */</span> |
| <span class="source-line-no">123</span><span id="line-123"> public static Job configureJob(Configuration conf, String[] args) throws IOException {</span> |
| <span class="source-line-no">124</span><span id="line-124"> Path inputPath = new Path(args[0]);</span> |
| <span class="source-line-no">125</span><span id="line-125"> String tableName = args[1];</span> |
| <span class="source-line-no">126</span><span id="line-126"> Job job = new Job(conf, NAME + "_" + tableName);</span> |
| <span class="source-line-no">127</span><span id="line-127"> job.setJarByClass(Uploader.class);</span> |
| <span class="source-line-no">128</span><span id="line-128"> FileInputFormat.setInputPaths(job, inputPath);</span> |
| <span class="source-line-no">129</span><span id="line-129"> job.setInputFormatClass(SequenceFileInputFormat.class);</span> |
| <span class="source-line-no">130</span><span id="line-130"> job.setMapperClass(Uploader.class);</span> |
| <span class="source-line-no">131</span><span id="line-131"> // No reducers. Just write straight to table. Call initTableReducerJob</span> |
| <span class="source-line-no">132</span><span id="line-132"> // because it sets up the TableOutputFormat.</span> |
| <span class="source-line-no">133</span><span id="line-133"> TableMapReduceUtil.initTableReducerJob(tableName, null, job);</span> |
| <span class="source-line-no">134</span><span id="line-134"> job.setNumReduceTasks(0);</span> |
| <span class="source-line-no">135</span><span id="line-135"> return job;</span> |
| <span class="source-line-no">136</span><span id="line-136"> }</span> |
| <span class="source-line-no">137</span><span id="line-137"></span> |
| <span class="source-line-no">138</span><span id="line-138"> /**</span> |
| <span class="source-line-no">139</span><span id="line-139"> * Main entry point.</span> |
| <span class="source-line-no">140</span><span id="line-140"> * @param otherArgs The command line parameters after ToolRunner handles standard.</span> |
| <span class="source-line-no">141</span><span id="line-141"> * @throws Exception When running the job fails.</span> |
| <span class="source-line-no">142</span><span id="line-142"> */</span> |
| <span class="source-line-no">143</span><span id="line-143"> @Override</span> |
| <span class="source-line-no">144</span><span id="line-144"> public int run(String[] otherArgs) throws Exception {</span> |
| <span class="source-line-no">145</span><span id="line-145"> if (otherArgs.length != 2) {</span> |
| <span class="source-line-no">146</span><span id="line-146"> System.err.println("Wrong number of arguments: " + otherArgs.length);</span> |
| <span class="source-line-no">147</span><span id="line-147"> System.err.println("Usage: " + NAME + " <input> <tablename>");</span> |
| <span class="source-line-no">148</span><span id="line-148"> return -1;</span> |
| <span class="source-line-no">149</span><span id="line-149"> }</span> |
| <span class="source-line-no">150</span><span id="line-150"> Job job = configureJob(getConf(), otherArgs);</span> |
| <span class="source-line-no">151</span><span id="line-151"> return (job.waitForCompletion(true) ? 0 : 1);</span> |
| <span class="source-line-no">152</span><span id="line-152"> }</span> |
| <span class="source-line-no">153</span><span id="line-153"></span> |
| <span class="source-line-no">154</span><span id="line-154"> public static void main(String[] args) throws Exception {</span> |
| <span class="source-line-no">155</span><span id="line-155"> int status = ToolRunner.run(HBaseConfiguration.create(), new SampleUploader(), args);</span> |
| <span class="source-line-no">156</span><span id="line-156"> System.exit(status);</span> |
| <span class="source-line-no">157</span><span id="line-157"> }</span> |
| <span class="source-line-no">158</span><span id="line-158">}</span> |
| </pre> |
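<p>Below is a minimal, standalone sketch (not part of the generated source above) of the CSV-to-Put
mapping that Uploader.map() performs: given one row,family,qualifier,value line, it builds the
equivalent Put. The class name SampleUploaderPutSketch, the toPut helper, and the sample line
row1,cf,q1,v1 are illustrative assumptions, and plain String.split is used for brevity in place of
the Guava Splitter used by the mapper.</p>
<pre>
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical standalone sketch of the CSV-to-Put conversion done in Uploader.map().
public class SampleUploaderPutSketch {

  static Put toPut(String csvLine) {
    String[] f = csvLine.split(",");        // row,family,qualifier,value
    if (f.length != 4) {
      throw new IllegalArgumentException("Expected 4 fields: " + csvLine);
    }
    Put put = new Put(Bytes.toBytes(f[0])); // row key
    put.addColumn(Bytes.toBytes(f[1]),      // column family (must already exist in the table)
      Bytes.toBytes(f[2]),                  // qualifier
      Bytes.toBytes(f[3]));                 // value
    return put;
  }

  public static void main(String[] args) {
    // Hypothetical sample row; prints a short summary of the resulting Put.
    System.out.println(toPut("row1,cf,q1,v1"));
  }
}
</pre>
<p>As the class comment notes, the target table and column family must already exist; they can be
created beforehand in the HBase shell, e.g. create 'TABLE_NAME', 'family'.</p>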
| </div> |
| </main> |
| </body> |
| </html> |