<!DOCTYPE HTML>
<html lang="en">
<head>
<!-- Generated by javadoc (17) -->
<title>Source code</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="source: package: org.apache.hadoop.hbase.backup.impl, class: IncrementalTableBackupClient">
<meta name="generator" content="javadoc/SourceToHTMLConverter">
<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
</head>
<body class="source-page">
<main role="main">
<div class="source-container">
<pre><span class="source-line-no">001</span><span id="line-1">/*</span>
<span class="source-line-no">002</span><span id="line-2"> * Licensed to the Apache Software Foundation (ASF) under one</span>
<span class="source-line-no">003</span><span id="line-3"> * or more contributor license agreements. See the NOTICE file</span>
<span class="source-line-no">004</span><span id="line-4"> * distributed with this work for additional information</span>
<span class="source-line-no">005</span><span id="line-5"> * regarding copyright ownership. The ASF licenses this file</span>
<span class="source-line-no">006</span><span id="line-6"> * to you under the Apache License, Version 2.0 (the</span>
<span class="source-line-no">007</span><span id="line-7"> * "License"); you may not use this file except in compliance</span>
<span class="source-line-no">008</span><span id="line-8"> * with the License. You may obtain a copy of the License at</span>
<span class="source-line-no">009</span><span id="line-9"> *</span>
<span class="source-line-no">010</span><span id="line-10"> * http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="source-line-no">011</span><span id="line-11"> *</span>
<span class="source-line-no">012</span><span id="line-12"> * Unless required by applicable law or agreed to in writing, software</span>
<span class="source-line-no">013</span><span id="line-13"> * distributed under the License is distributed on an "AS IS" BASIS,</span>
<span class="source-line-no">014</span><span id="line-14"> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</span>
<span class="source-line-no">015</span><span id="line-15"> * See the License for the specific language governing permissions and</span>
<span class="source-line-no">016</span><span id="line-16"> * limitations under the License.</span>
<span class="source-line-no">017</span><span id="line-17"> */</span>
<span class="source-line-no">018</span><span id="line-18">package org.apache.hadoop.hbase.backup.impl;</span>
<span class="source-line-no">019</span><span id="line-19"></span>
<span class="source-line-no">020</span><span id="line-20">import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.JOB_NAME_CONF_KEY;</span>
<span class="source-line-no">021</span><span id="line-21"></span>
<span class="source-line-no">022</span><span id="line-22">import java.io.IOException;</span>
<span class="source-line-no">023</span><span id="line-23">import java.net.URI;</span>
<span class="source-line-no">024</span><span id="line-24">import java.net.URISyntaxException;</span>
<span class="source-line-no">025</span><span id="line-25">import java.util.ArrayList;</span>
<span class="source-line-no">026</span><span id="line-26">import java.util.HashMap;</span>
<span class="source-line-no">027</span><span id="line-27">import java.util.List;</span>
<span class="source-line-no">028</span><span id="line-28">import java.util.Map;</span>
<span class="source-line-no">029</span><span id="line-29">import java.util.Set;</span>
<span class="source-line-no">030</span><span id="line-30">import org.apache.commons.io.FilenameUtils;</span>
<span class="source-line-no">031</span><span id="line-31">import org.apache.commons.lang3.StringUtils;</span>
<span class="source-line-no">032</span><span id="line-32">import org.apache.hadoop.fs.FileSystem;</span>
<span class="source-line-no">033</span><span id="line-33">import org.apache.hadoop.fs.LocatedFileStatus;</span>
<span class="source-line-no">034</span><span id="line-34">import org.apache.hadoop.fs.Path;</span>
<span class="source-line-no">035</span><span id="line-35">import org.apache.hadoop.fs.RemoteIterator;</span>
<span class="source-line-no">036</span><span id="line-36">import org.apache.hadoop.hbase.TableName;</span>
<span class="source-line-no">037</span><span id="line-37">import org.apache.hadoop.hbase.backup.BackupCopyJob;</span>
<span class="source-line-no">038</span><span id="line-38">import org.apache.hadoop.hbase.backup.BackupInfo;</span>
<span class="source-line-no">039</span><span id="line-39">import org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase;</span>
<span class="source-line-no">040</span><span id="line-40">import org.apache.hadoop.hbase.backup.BackupRequest;</span>
<span class="source-line-no">041</span><span id="line-41">import org.apache.hadoop.hbase.backup.BackupRestoreFactory;</span>
<span class="source-line-no">042</span><span id="line-42">import org.apache.hadoop.hbase.backup.BackupType;</span>
<span class="source-line-no">043</span><span id="line-43">import org.apache.hadoop.hbase.backup.HBackupFileSystem;</span>
<span class="source-line-no">044</span><span id="line-44">import org.apache.hadoop.hbase.backup.mapreduce.MapReduceBackupCopyJob;</span>
<span class="source-line-no">045</span><span id="line-45">import org.apache.hadoop.hbase.backup.mapreduce.MapReduceHFileSplitterJob;</span>
<span class="source-line-no">046</span><span id="line-46">import org.apache.hadoop.hbase.backup.util.BackupUtils;</span>
<span class="source-line-no">047</span><span id="line-47">import org.apache.hadoop.hbase.client.Admin;</span>
<span class="source-line-no">048</span><span id="line-48">import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;</span>
<span class="source-line-no">049</span><span id="line-49">import org.apache.hadoop.hbase.client.Connection;</span>
<span class="source-line-no">050</span><span id="line-50">import org.apache.hadoop.hbase.io.hfile.HFile;</span>
<span class="source-line-no">051</span><span id="line-51">import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;</span>
<span class="source-line-no">052</span><span id="line-52">import org.apache.hadoop.hbase.mapreduce.WALPlayer;</span>
<span class="source-line-no">053</span><span id="line-53">import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;</span>
<span class="source-line-no">054</span><span id="line-54">import org.apache.hadoop.hbase.snapshot.SnapshotManifest;</span>
<span class="source-line-no">055</span><span id="line-55">import org.apache.hadoop.hbase.snapshot.SnapshotRegionLocator;</span>
<span class="source-line-no">056</span><span id="line-56">import org.apache.hadoop.hbase.snapshot.SnapshotTTLExpiredException;</span>
<span class="source-line-no">057</span><span id="line-57">import org.apache.hadoop.hbase.util.CommonFSUtils;</span>
<span class="source-line-no">058</span><span id="line-58">import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;</span>
<span class="source-line-no">059</span><span id="line-59">import org.apache.hadoop.hbase.util.HFileArchiveUtil;</span>
<span class="source-line-no">060</span><span id="line-60">import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;</span>
<span class="source-line-no">061</span><span id="line-61">import org.apache.hadoop.util.Tool;</span>
<span class="source-line-no">062</span><span id="line-62">import org.apache.yetus.audience.InterfaceAudience;</span>
<span class="source-line-no">063</span><span id="line-63">import org.slf4j.Logger;</span>
<span class="source-line-no">064</span><span id="line-64">import org.slf4j.LoggerFactory;</span>
<span class="source-line-no">065</span><span id="line-65"></span>
<span class="source-line-no">066</span><span id="line-66">import org.apache.hbase.thirdparty.com.google.common.collect.Lists;</span>
<span class="source-line-no">067</span><span id="line-67"></span>
<span class="source-line-no">068</span><span id="line-68">import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;</span>
<span class="source-line-no">069</span><span id="line-69">import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;</span>
<span class="source-line-no">070</span><span id="line-70"></span>
<span class="source-line-no">071</span><span id="line-71">/**</span>
<span class="source-line-no">072</span><span id="line-72"> * Incremental backup implementation. See the {@link #execute() execute} method.</span>
<span class="source-line-no">073</span><span id="line-73"> */</span>
<span class="source-line-no">074</span><span id="line-74">@InterfaceAudience.Private</span>
<span class="source-line-no">075</span><span id="line-75">public class IncrementalTableBackupClient extends TableBackupClient {</span>
<span class="source-line-no">076</span><span id="line-76"> private static final Logger LOG = LoggerFactory.getLogger(IncrementalTableBackupClient.class);</span>
<span class="source-line-no">077</span><span id="line-77"></span>
<span class="source-line-no">078</span><span id="line-78"> protected IncrementalTableBackupClient() {</span>
<span class="source-line-no">079</span><span id="line-79"> }</span>
<span class="source-line-no">080</span><span id="line-80"></span>
<span class="source-line-no">081</span><span id="line-81"> public IncrementalTableBackupClient(final Connection conn, final String backupId,</span>
<span class="source-line-no">082</span><span id="line-82"> BackupRequest request) throws IOException {</span>
<span class="source-line-no">083</span><span id="line-83"> super(conn, backupId, request);</span>
<span class="source-line-no">084</span><span id="line-84"> }</span>
<span class="source-line-no">085</span><span id="line-85"></span>
<span class="source-line-no">086</span><span id="line-86"> protected List&lt;String&gt; filterMissingFiles(List&lt;String&gt; incrBackupFileList) throws IOException {</span>
<span class="source-line-no">087</span><span id="line-87"> List&lt;String&gt; list = new ArrayList&lt;&gt;();</span>
<span class="source-line-no">088</span><span id="line-88"> for (String file : incrBackupFileList) {</span>
<span class="source-line-no">089</span><span id="line-89"> Path p = new Path(file);</span>
<span class="source-line-no">090</span><span id="line-90"> if (fs.exists(p) || isActiveWalPath(p)) {</span>
<span class="source-line-no">091</span><span id="line-91"> list.add(file);</span>
<span class="source-line-no">092</span><span id="line-92"> } else {</span>
<span class="source-line-no">093</span><span id="line-93"> LOG.warn("Can't find file: " + file);</span>
<span class="source-line-no">094</span><span id="line-94"> }</span>
<span class="source-line-no">095</span><span id="line-95"> }</span>
<span class="source-line-no">096</span><span id="line-96"> return list;</span>
<span class="source-line-no">097</span><span id="line-97"> }</span>
<span class="source-line-no">098</span><span id="line-98"></span>
<span class="source-line-no">099</span><span id="line-99"> /**</span>
<span class="source-line-no">100</span><span id="line-100"> * Check if a given path is belongs to active WAL directory</span>
<span class="source-line-no">101</span><span id="line-101"> * @param p path</span>
<span class="source-line-no">102</span><span id="line-102"> * @return true, if yes</span>
<span class="source-line-no">103</span><span id="line-103"> */</span>
<span class="source-line-no">104</span><span id="line-104"> protected boolean isActiveWalPath(Path p) {</span>
<span class="source-line-no">105</span><span id="line-105"> return !AbstractFSWALProvider.isArchivedLogFile(p);</span>
<span class="source-line-no">106</span><span id="line-106"> }</span>
<span class="source-line-no">107</span><span id="line-107"></span>
<span class="source-line-no">108</span><span id="line-108"> protected static int getIndex(TableName tbl, List&lt;TableName&gt; sTableList) {</span>
<span class="source-line-no">109</span><span id="line-109"> if (sTableList == null) {</span>
<span class="source-line-no">110</span><span id="line-110"> return 0;</span>
<span class="source-line-no">111</span><span id="line-111"> }</span>
<span class="source-line-no">112</span><span id="line-112"></span>
<span class="source-line-no">113</span><span id="line-113"> for (int i = 0; i &lt; sTableList.size(); i++) {</span>
<span class="source-line-no">114</span><span id="line-114"> if (tbl.equals(sTableList.get(i))) {</span>
<span class="source-line-no">115</span><span id="line-115"> return i;</span>
<span class="source-line-no">116</span><span id="line-116"> }</span>
<span class="source-line-no">117</span><span id="line-117"> }</span>
<span class="source-line-no">118</span><span id="line-118"> return -1;</span>
<span class="source-line-no">119</span><span id="line-119"> }</span>
<span class="source-line-no">120</span><span id="line-120"></span>
<span class="source-line-no">121</span><span id="line-121"> /**</span>
<span class="source-line-no">122</span><span id="line-122"> * Reads bulk load records from backup table, iterates through the records and forms the paths for</span>
<span class="source-line-no">123</span><span id="line-123"> * bulk loaded hfiles. Copies the bulk loaded hfiles to backup destination. This method does NOT</span>
<span class="source-line-no">124</span><span id="line-124"> * clean up the entries in the bulk load system table. Those entries should not be cleaned until</span>
<span class="source-line-no">125</span><span id="line-125"> * the backup is marked as complete.</span>
<span class="source-line-no">126</span><span id="line-126"> * @param tablesToBackup list of tables to be backed up</span>
<span class="source-line-no">127</span><span id="line-127"> */</span>
<span class="source-line-no">128</span><span id="line-128"> protected List&lt;BulkLoad&gt; handleBulkLoad(List&lt;TableName&gt; tablesToBackup) throws IOException {</span>
<span class="source-line-no">129</span><span id="line-129"> Map&lt;TableName, MergeSplitBulkloadInfo&gt; toBulkload = new HashMap&lt;&gt;();</span>
<span class="source-line-no">130</span><span id="line-130"> List&lt;BulkLoad&gt; bulkLoads = backupManager.readBulkloadRows(tablesToBackup);</span>
<span class="source-line-no">131</span><span id="line-131"> FileSystem tgtFs;</span>
<span class="source-line-no">132</span><span id="line-132"> try {</span>
<span class="source-line-no">133</span><span id="line-133"> tgtFs = FileSystem.get(new URI(backupInfo.getBackupRootDir()), conf);</span>
<span class="source-line-no">134</span><span id="line-134"> } catch (URISyntaxException use) {</span>
<span class="source-line-no">135</span><span id="line-135"> throw new IOException("Unable to get FileSystem", use);</span>
<span class="source-line-no">136</span><span id="line-136"> }</span>
<span class="source-line-no">137</span><span id="line-137"> Path rootdir = CommonFSUtils.getRootDir(conf);</span>
<span class="source-line-no">138</span><span id="line-138"> Path tgtRoot = new Path(new Path(backupInfo.getBackupRootDir()), backupId);</span>
<span class="source-line-no">139</span><span id="line-139"></span>
<span class="source-line-no">140</span><span id="line-140"> for (BulkLoad bulkLoad : bulkLoads) {</span>
<span class="source-line-no">141</span><span id="line-141"> TableName srcTable = bulkLoad.getTableName();</span>
<span class="source-line-no">142</span><span id="line-142"> MergeSplitBulkloadInfo bulkloadInfo =</span>
<span class="source-line-no">143</span><span id="line-143"> toBulkload.computeIfAbsent(srcTable, MergeSplitBulkloadInfo::new);</span>
<span class="source-line-no">144</span><span id="line-144"> String regionName = bulkLoad.getRegion();</span>
<span class="source-line-no">145</span><span id="line-145"> String fam = bulkLoad.getColumnFamily();</span>
<span class="source-line-no">146</span><span id="line-146"> String filename = FilenameUtils.getName(bulkLoad.getHfilePath());</span>
<span class="source-line-no">147</span><span id="line-147"></span>
<span class="source-line-no">148</span><span id="line-148"> if (!tablesToBackup.contains(srcTable)) {</span>
<span class="source-line-no">149</span><span id="line-149"> LOG.debug("Skipping {} since it is not in tablesToBackup", srcTable);</span>
<span class="source-line-no">150</span><span id="line-150"> continue;</span>
<span class="source-line-no">151</span><span id="line-151"> }</span>
<span class="source-line-no">152</span><span id="line-152"> Path tblDir = CommonFSUtils.getTableDir(rootdir, srcTable);</span>
<span class="source-line-no">153</span><span id="line-153"> Path p = new Path(tblDir, regionName + Path.SEPARATOR + fam + Path.SEPARATOR + filename);</span>
<span class="source-line-no">154</span><span id="line-154"></span>
<span class="source-line-no">155</span><span id="line-155"> String srcTableQualifier = srcTable.getQualifierAsString();</span>
<span class="source-line-no">156</span><span id="line-156"> String srcTableNs = srcTable.getNamespaceAsString();</span>
<span class="source-line-no">157</span><span id="line-157"> Path tgtFam = new Path(tgtRoot, srcTableNs + Path.SEPARATOR + srcTableQualifier</span>
<span class="source-line-no">158</span><span id="line-158"> + Path.SEPARATOR + regionName + Path.SEPARATOR + fam);</span>
<span class="source-line-no">159</span><span id="line-159"> if (!tgtFs.mkdirs(tgtFam)) {</span>
<span class="source-line-no">160</span><span id="line-160"> throw new IOException("couldn't create " + tgtFam);</span>
<span class="source-line-no">161</span><span id="line-161"> }</span>
<span class="source-line-no">162</span><span id="line-162"> Path tgt = new Path(tgtFam, filename);</span>
<span class="source-line-no">163</span><span id="line-163"></span>
<span class="source-line-no">164</span><span id="line-164"> Path archiveDir = HFileArchiveUtil.getStoreArchivePath(conf, srcTable, regionName, fam);</span>
<span class="source-line-no">165</span><span id="line-165"> Path archive = new Path(archiveDir, filename);</span>
<span class="source-line-no">166</span><span id="line-166"></span>
<span class="source-line-no">167</span><span id="line-167"> if (fs.exists(p)) {</span>
<span class="source-line-no">168</span><span id="line-168"> if (LOG.isTraceEnabled()) {</span>
<span class="source-line-no">169</span><span id="line-169"> LOG.trace("found bulk hfile {} in {} for {}", bulkLoad.getHfilePath(), p.getParent(),</span>
<span class="source-line-no">170</span><span id="line-170"> srcTableQualifier);</span>
<span class="source-line-no">171</span><span id="line-171"> LOG.trace("copying {} to {}", p, tgt);</span>
<span class="source-line-no">172</span><span id="line-172"> }</span>
<span class="source-line-no">173</span><span id="line-173"> bulkloadInfo.addActiveFile(p.toString());</span>
<span class="source-line-no">174</span><span id="line-174"> } else if (fs.exists(archive)) {</span>
<span class="source-line-no">175</span><span id="line-175"> LOG.debug("copying archive {} to {}", archive, tgt);</span>
<span class="source-line-no">176</span><span id="line-176"> bulkloadInfo.addArchiveFiles(archive.toString());</span>
<span class="source-line-no">177</span><span id="line-177"> }</span>
<span class="source-line-no">178</span><span id="line-178"> }</span>
<span class="source-line-no">179</span><span id="line-179"></span>
<span class="source-line-no">180</span><span id="line-180"> for (MergeSplitBulkloadInfo bulkloadInfo : toBulkload.values()) {</span>
<span class="source-line-no">181</span><span id="line-181"> mergeSplitAndCopyBulkloadedHFiles(bulkloadInfo.getActiveFiles(),</span>
<span class="source-line-no">182</span><span id="line-182"> bulkloadInfo.getArchiveFiles(), bulkloadInfo.getSrcTable(), tgtFs);</span>
<span class="source-line-no">183</span><span id="line-183"> }</span>
<span class="source-line-no">184</span><span id="line-184"></span>
<span class="source-line-no">185</span><span id="line-185"> return bulkLoads;</span>
<span class="source-line-no">186</span><span id="line-186"> }</span>
<span class="source-line-no">187</span><span id="line-187"></span>
<span class="source-line-no">188</span><span id="line-188"> private void mergeSplitAndCopyBulkloadedHFiles(List&lt;String&gt; activeFiles,</span>
<span class="source-line-no">189</span><span id="line-189"> List&lt;String&gt; archiveFiles, TableName tn, FileSystem tgtFs) throws IOException {</span>
<span class="source-line-no">190</span><span id="line-190"> int attempt = 1;</span>
<span class="source-line-no">191</span><span id="line-191"></span>
<span class="source-line-no">192</span><span id="line-192"> while (!activeFiles.isEmpty()) {</span>
<span class="source-line-no">193</span><span id="line-193"> LOG.info("MergeSplit {} active bulk loaded files. Attempt={}", activeFiles.size(), attempt++);</span>
<span class="source-line-no">194</span><span id="line-194"> // Active file can be archived during copy operation,</span>
<span class="source-line-no">195</span><span id="line-195"> // we need to handle this properly</span>
<span class="source-line-no">196</span><span id="line-196"> try {</span>
<span class="source-line-no">197</span><span id="line-197"> mergeSplitAndCopyBulkloadedHFiles(activeFiles, tn, tgtFs);</span>
<span class="source-line-no">198</span><span id="line-198"> break;</span>
<span class="source-line-no">199</span><span id="line-199"> } catch (IOException e) {</span>
<span class="source-line-no">200</span><span id="line-200"> int numActiveFiles = activeFiles.size();</span>
<span class="source-line-no">201</span><span id="line-201"> updateFileLists(activeFiles, archiveFiles);</span>
<span class="source-line-no">202</span><span id="line-202"> if (activeFiles.size() &lt; numActiveFiles) {</span>
<span class="source-line-no">203</span><span id="line-203"> continue;</span>
<span class="source-line-no">204</span><span id="line-204"> }</span>
<span class="source-line-no">205</span><span id="line-205"></span>
<span class="source-line-no">206</span><span id="line-206"> throw e;</span>
<span class="source-line-no">207</span><span id="line-207"> }</span>
<span class="source-line-no">208</span><span id="line-208"> }</span>
<span class="source-line-no">209</span><span id="line-209"></span>
<span class="source-line-no">210</span><span id="line-210"> if (!archiveFiles.isEmpty()) {</span>
<span class="source-line-no">211</span><span id="line-211"> mergeSplitAndCopyBulkloadedHFiles(archiveFiles, tn, tgtFs);</span>
<span class="source-line-no">212</span><span id="line-212"> }</span>
<span class="source-line-no">213</span><span id="line-213"> }</span>
<span class="source-line-no">214</span><span id="line-214"></span>
<span class="source-line-no">215</span><span id="line-215"> private void mergeSplitAndCopyBulkloadedHFiles(List&lt;String&gt; files, TableName tn, FileSystem tgtFs)</span>
<span class="source-line-no">216</span><span id="line-216"> throws IOException {</span>
<span class="source-line-no">217</span><span id="line-217"> MapReduceHFileSplitterJob player = new MapReduceHFileSplitterJob();</span>
<span class="source-line-no">218</span><span id="line-218"> conf.set(MapReduceHFileSplitterJob.BULK_OUTPUT_CONF_KEY,</span>
<span class="source-line-no">219</span><span id="line-219"> getBulkOutputDirForTable(tn).toString());</span>
<span class="source-line-no">220</span><span id="line-220"> player.setConf(conf);</span>
<span class="source-line-no">221</span><span id="line-221"></span>
<span class="source-line-no">222</span><span id="line-222"> String inputDirs = StringUtils.join(files, ",");</span>
<span class="source-line-no">223</span><span id="line-223"> String[] args = { inputDirs, tn.getNameWithNamespaceInclAsString() };</span>
<span class="source-line-no">224</span><span id="line-224"></span>
<span class="source-line-no">225</span><span id="line-225"> int result;</span>
<span class="source-line-no">226</span><span id="line-226"></span>
<span class="source-line-no">227</span><span id="line-227"> try {</span>
<span class="source-line-no">228</span><span id="line-228"> result = player.run(args);</span>
<span class="source-line-no">229</span><span id="line-229"> } catch (Exception e) {</span>
<span class="source-line-no">230</span><span id="line-230"> LOG.error("Failed to run MapReduceHFileSplitterJob", e);</span>
<span class="source-line-no">231</span><span id="line-231"> // Delete the bulkload directory if we fail to run the HFile splitter job for any reason</span>
<span class="source-line-no">232</span><span id="line-232"> // as it might be re-tried</span>
<span class="source-line-no">233</span><span id="line-233"> deleteBulkLoadDirectory();</span>
<span class="source-line-no">234</span><span id="line-234"> throw new IOException(e);</span>
<span class="source-line-no">235</span><span id="line-235"> }</span>
<span class="source-line-no">236</span><span id="line-236"></span>
<span class="source-line-no">237</span><span id="line-237"> if (result != 0) {</span>
<span class="source-line-no">238</span><span id="line-238"> throw new IOException(</span>
<span class="source-line-no">239</span><span id="line-239"> "Failed to run MapReduceHFileSplitterJob with invalid result: " + result);</span>
<span class="source-line-no">240</span><span id="line-240"> }</span>
<span class="source-line-no">241</span><span id="line-241"></span>
<span class="source-line-no">242</span><span id="line-242"> incrementalCopyBulkloadHFiles(tgtFs, tn);</span>
<span class="source-line-no">243</span><span id="line-243"> }</span>
<span class="source-line-no">244</span><span id="line-244"></span>
<span class="source-line-no">245</span><span id="line-245"> private void updateFileLists(List&lt;String&gt; activeFiles, List&lt;String&gt; archiveFiles)</span>
<span class="source-line-no">246</span><span id="line-246"> throws IOException {</span>
<span class="source-line-no">247</span><span id="line-247"> List&lt;String&gt; newlyArchived = new ArrayList&lt;&gt;();</span>
<span class="source-line-no">248</span><span id="line-248"></span>
<span class="source-line-no">249</span><span id="line-249"> for (String spath : activeFiles) {</span>
<span class="source-line-no">250</span><span id="line-250"> if (!fs.exists(new Path(spath))) {</span>
<span class="source-line-no">251</span><span id="line-251"> newlyArchived.add(spath);</span>
<span class="source-line-no">252</span><span id="line-252"> }</span>
<span class="source-line-no">253</span><span id="line-253"> }</span>
<span class="source-line-no">254</span><span id="line-254"></span>
<span class="source-line-no">255</span><span id="line-255"> if (newlyArchived.size() &gt; 0) {</span>
<span class="source-line-no">256</span><span id="line-256"> activeFiles.removeAll(newlyArchived);</span>
<span class="source-line-no">257</span><span id="line-257"> archiveFiles.addAll(newlyArchived);</span>
<span class="source-line-no">258</span><span id="line-258"> }</span>
<span class="source-line-no">259</span><span id="line-259"></span>
<span class="source-line-no">260</span><span id="line-260"> LOG.debug(newlyArchived.size() + " files have been archived.");</span>
<span class="source-line-no">261</span><span id="line-261"> }</span>
<span class="source-line-no">262</span><span id="line-262"></span>
<span class="source-line-no">263</span><span id="line-263"> /**</span>
<span class="source-line-no">264</span><span id="line-264"> * @throws IOException If the execution of the backup fails</span>
<span class="source-line-no">265</span><span id="line-265"> * @throws ColumnFamilyMismatchException If the column families of the current table do not match</span>
<span class="source-line-no">266</span><span id="line-266"> * the column families for the last full backup. In which</span>
<span class="source-line-no">267</span><span id="line-267"> * case, a full backup should be taken</span>
<span class="source-line-no">268</span><span id="line-268"> */</span>
<span class="source-line-no">269</span><span id="line-269"> @Override</span>
<span class="source-line-no">270</span><span id="line-270"> public void execute() throws IOException, ColumnFamilyMismatchException {</span>
<span class="source-line-no">271</span><span id="line-271"> try {</span>
<span class="source-line-no">272</span><span id="line-272"> Map&lt;TableName, String&gt; tablesToFullBackupIds = getFullBackupIds();</span>
<span class="source-line-no">273</span><span id="line-273"> verifyCfCompatibility(backupInfo.getTables(), tablesToFullBackupIds);</span>
<span class="source-line-no">274</span><span id="line-274"></span>
<span class="source-line-no">275</span><span id="line-275"> // case PREPARE_INCREMENTAL:</span>
<span class="source-line-no">276</span><span id="line-276"> beginBackup(backupManager, backupInfo);</span>
<span class="source-line-no">277</span><span id="line-277"> backupInfo.setPhase(BackupPhase.PREPARE_INCREMENTAL);</span>
<span class="source-line-no">278</span><span id="line-278"> LOG.debug("For incremental backup, current table set is "</span>
<span class="source-line-no">279</span><span id="line-279"> + backupManager.getIncrementalBackupTableSet());</span>
<span class="source-line-no">280</span><span id="line-280"> newTimestamps = ((IncrementalBackupManager) backupManager).getIncrBackupLogFileMap();</span>
<span class="source-line-no">281</span><span id="line-281"> } catch (Exception e) {</span>
<span class="source-line-no">282</span><span id="line-282"> // fail the overall backup and return</span>
<span class="source-line-no">283</span><span id="line-283"> failBackup(conn, backupInfo, backupManager, e, "Unexpected Exception : ",</span>
<span class="source-line-no">284</span><span id="line-284"> BackupType.INCREMENTAL, conf);</span>
<span class="source-line-no">285</span><span id="line-285"> throw new IOException(e);</span>
<span class="source-line-no">286</span><span id="line-286"> }</span>
<span class="source-line-no">287</span><span id="line-287"></span>
<span class="source-line-no">288</span><span id="line-288"> // case INCREMENTAL_COPY:</span>
<span class="source-line-no">289</span><span id="line-289"> try {</span>
<span class="source-line-no">290</span><span id="line-290"> // copy out the table and region info files for each table</span>
<span class="source-line-no">291</span><span id="line-291"> BackupUtils.copyTableRegionInfo(conn, backupInfo, conf);</span>
<span class="source-line-no">292</span><span id="line-292"> setupRegionLocator();</span>
<span class="source-line-no">293</span><span id="line-293"> // convert WAL to HFiles and copy them to .tmp under BACKUP_ROOT</span>
<span class="source-line-no">294</span><span id="line-294"> convertWALsToHFiles();</span>
<span class="source-line-no">295</span><span id="line-295"> incrementalCopyHFiles(new String[] { getBulkOutputDir().toString() },</span>
<span class="source-line-no">296</span><span id="line-296"> backupInfo.getBackupRootDir());</span>
<span class="source-line-no">297</span><span id="line-297"> } catch (Exception e) {</span>
<span class="source-line-no">298</span><span id="line-298"> String msg = "Unexpected exception in incremental-backup: incremental copy " + backupId;</span>
<span class="source-line-no">299</span><span id="line-299"> // fail the overall backup and return</span>
<span class="source-line-no">300</span><span id="line-300"> failBackup(conn, backupInfo, backupManager, e, msg, BackupType.INCREMENTAL, conf);</span>
<span class="source-line-no">301</span><span id="line-301"> throw new IOException(e);</span>
<span class="source-line-no">302</span><span id="line-302"> }</span>
<span class="source-line-no">303</span><span id="line-303"> // case INCR_BACKUP_COMPLETE:</span>
<span class="source-line-no">304</span><span id="line-304"> // set overall backup status: complete. Here we make sure to complete the backup.</span>
<span class="source-line-no">305</span><span id="line-305"> // After this checkpoint, even if entering cancel process, will let the backup finished</span>
<span class="source-line-no">306</span><span id="line-306"> try {</span>
<span class="source-line-no">307</span><span id="line-307"> // Set the previousTimestampMap which is before this current log roll to the manifest.</span>
<span class="source-line-no">308</span><span id="line-308"> Map&lt;TableName, Map&lt;String, Long&gt;&gt; previousTimestampMap = backupManager.readLogTimestampMap();</span>
<span class="source-line-no">309</span><span id="line-309"> backupInfo.setIncrTimestampMap(previousTimestampMap);</span>
<span class="source-line-no">310</span><span id="line-310"></span>
<span class="source-line-no">311</span><span id="line-311"> // The table list in backupInfo is good for both full backup and incremental backup.</span>
<span class="source-line-no">312</span><span id="line-312"> // For incremental backup, it contains the incremental backup table set.</span>
<span class="source-line-no">313</span><span id="line-313"> backupManager.writeRegionServerLogTimestamp(backupInfo.getTables(), newTimestamps);</span>
<span class="source-line-no">314</span><span id="line-314"></span>
<span class="source-line-no">315</span><span id="line-315"> Map&lt;TableName, Map&lt;String, Long&gt;&gt; newTableSetTimestampMap =</span>
<span class="source-line-no">316</span><span id="line-316"> backupManager.readLogTimestampMap();</span>
<span class="source-line-no">317</span><span id="line-317"></span>
<span class="source-line-no">318</span><span id="line-318"> backupInfo.setTableSetTimestampMap(newTableSetTimestampMap);</span>
<span class="source-line-no">319</span><span id="line-319"> Long newStartCode =</span>
<span class="source-line-no">320</span><span id="line-320"> BackupUtils.getMinValue(BackupUtils.getRSLogTimestampMins(newTableSetTimestampMap));</span>
<span class="source-line-no">321</span><span id="line-321"> backupManager.writeBackupStartCode(newStartCode);</span>
<span class="source-line-no">322</span><span id="line-322"></span>
<span class="source-line-no">323</span><span id="line-323"> List&lt;BulkLoad&gt; bulkLoads = handleBulkLoad(backupInfo.getTableNames());</span>
<span class="source-line-no">324</span><span id="line-324"></span>
<span class="source-line-no">325</span><span id="line-325"> // backup complete</span>
<span class="source-line-no">326</span><span id="line-326"> completeBackup(conn, backupInfo, BackupType.INCREMENTAL, conf);</span>
<span class="source-line-no">327</span><span id="line-327"></span>
<span class="source-line-no">328</span><span id="line-328"> List&lt;byte[]&gt; bulkLoadedRows = Lists.transform(bulkLoads, BulkLoad::getRowKey);</span>
<span class="source-line-no">329</span><span id="line-329"> backupManager.deleteBulkLoadedRows(bulkLoadedRows);</span>
<span class="source-line-no">330</span><span id="line-330"> } catch (IOException e) {</span>
<span class="source-line-no">331</span><span id="line-331"> failBackup(conn, backupInfo, backupManager, e, "Unexpected Exception : ",</span>
<span class="source-line-no">332</span><span id="line-332"> BackupType.INCREMENTAL, conf);</span>
<span class="source-line-no">333</span><span id="line-333"> throw new IOException(e);</span>
<span class="source-line-no">334</span><span id="line-334"> }</span>
<span class="source-line-no">335</span><span id="line-335"> }</span>
<span class="source-line-no">336</span><span id="line-336"></span>
<span class="source-line-no">337</span><span id="line-337"> protected void incrementalCopyHFiles(String[] files, String backupDest) throws IOException {</span>
<span class="source-line-no">338</span><span id="line-338"> boolean diskBasedSortingOriginalValue = HFileOutputFormat2.diskBasedSortingEnabled(conf);</span>
<span class="source-line-no">339</span><span id="line-339"> try {</span>
<span class="source-line-no">340</span><span id="line-340"> LOG.debug("Incremental copy HFiles is starting. dest=" + backupDest);</span>
<span class="source-line-no">341</span><span id="line-341"> // set overall backup phase: incremental_copy</span>
<span class="source-line-no">342</span><span id="line-342"> backupInfo.setPhase(BackupPhase.INCREMENTAL_COPY);</span>
<span class="source-line-no">343</span><span id="line-343"> // get incremental backup file list and prepare parms for DistCp</span>
<span class="source-line-no">344</span><span id="line-344"> String[] strArr = new String[files.length + 1];</span>
<span class="source-line-no">345</span><span id="line-345"> System.arraycopy(files, 0, strArr, 0, files.length);</span>
<span class="source-line-no">346</span><span id="line-346"> strArr[strArr.length - 1] = backupDest;</span>
<span class="source-line-no">347</span><span id="line-347"></span>
<span class="source-line-no">348</span><span id="line-348"> String jobname = "Incremental_Backup-HFileCopy-" + backupInfo.getBackupId();</span>
<span class="source-line-no">349</span><span id="line-349"> if (LOG.isDebugEnabled()) {</span>
<span class="source-line-no">350</span><span id="line-350"> LOG.debug("Setting incremental copy HFiles job name to : " + jobname);</span>
<span class="source-line-no">351</span><span id="line-351"> }</span>
<span class="source-line-no">352</span><span id="line-352"> conf.set(JOB_NAME_CONF_KEY, jobname);</span>
<span class="source-line-no">353</span><span id="line-353"> conf.setBoolean(HFileOutputFormat2.DISK_BASED_SORTING_ENABLED_KEY, true);</span>
<span class="source-line-no">354</span><span id="line-354"></span>
<span class="source-line-no">355</span><span id="line-355"> BackupCopyJob copyService = BackupRestoreFactory.getBackupCopyJob(conf);</span>
<span class="source-line-no">356</span><span id="line-356"> int res = copyService.copy(backupInfo, backupManager, conf, BackupType.INCREMENTAL, strArr);</span>
<span class="source-line-no">357</span><span id="line-357"> if (res != 0) {</span>
<span class="source-line-no">358</span><span id="line-358"> LOG.error("Copy incremental HFile files failed with return code: " + res + ".");</span>
<span class="source-line-no">359</span><span id="line-359"> throw new IOException(</span>
<span class="source-line-no">360</span><span id="line-360"> "Failed copy from " + StringUtils.join(files, ',') + " to " + backupDest);</span>
<span class="source-line-no">361</span><span id="line-361"> }</span>
<span class="source-line-no">362</span><span id="line-362"> LOG.debug("Incremental copy HFiles from " + StringUtils.join(files, ',') + " to " + backupDest</span>
<span class="source-line-no">363</span><span id="line-363"> + " finished.");</span>
<span class="source-line-no">364</span><span id="line-364"> } finally {</span>
<span class="source-line-no">365</span><span id="line-365"> deleteBulkLoadDirectory();</span>
<span class="source-line-no">366</span><span id="line-366"> conf.setBoolean(HFileOutputFormat2.DISK_BASED_SORTING_ENABLED_KEY,</span>
<span class="source-line-no">367</span><span id="line-367"> diskBasedSortingOriginalValue);</span>
<span class="source-line-no">368</span><span id="line-368"> }</span>
<span class="source-line-no">369</span><span id="line-369"> }</span>
<span class="source-line-no">370</span><span id="line-370"></span>
<span class="source-line-no">371</span><span id="line-371"> protected void deleteBulkLoadDirectory() throws IOException {</span>
<span class="source-line-no">372</span><span id="line-372"> // delete original bulk load directory on method exit</span>
<span class="source-line-no">373</span><span id="line-373"> Path path = getBulkOutputDir();</span>
<span class="source-line-no">374</span><span id="line-374"> FileSystem fs = FileSystem.get(path.toUri(), conf);</span>
<span class="source-line-no">375</span><span id="line-375"> boolean result = fs.delete(path, true);</span>
<span class="source-line-no">376</span><span id="line-376"> if (!result) {</span>
<span class="source-line-no">377</span><span id="line-377"> LOG.warn("Could not delete " + path);</span>
<span class="source-line-no">378</span><span id="line-378"> }</span>
<span class="source-line-no">379</span><span id="line-379"> }</span>
<span class="source-line-no">380</span><span id="line-380"></span>
<span class="source-line-no">381</span><span id="line-381"> protected void convertWALsToHFiles() throws IOException {</span>
<span class="source-line-no">382</span><span id="line-382"> // get incremental backup file list and prepare parameters for DistCp</span>
<span class="source-line-no">383</span><span id="line-383"> List&lt;String&gt; incrBackupFileList = backupInfo.getIncrBackupFileList();</span>
<span class="source-line-no">384</span><span id="line-384"> // Get list of tables in incremental backup set</span>
<span class="source-line-no">385</span><span id="line-385"> Set&lt;TableName&gt; tableSet = backupManager.getIncrementalBackupTableSet();</span>
<span class="source-line-no">386</span><span id="line-386"> // filter missing files out (they have been copied by previous backups)</span>
<span class="source-line-no">387</span><span id="line-387"> incrBackupFileList = filterMissingFiles(incrBackupFileList);</span>
<span class="source-line-no">388</span><span id="line-388"> List&lt;String&gt; tableList = new ArrayList&lt;String&gt;();</span>
<span class="source-line-no">389</span><span id="line-389"> for (TableName table : tableSet) {</span>
<span class="source-line-no">390</span><span id="line-390"> // Check if table exists</span>
<span class="source-line-no">391</span><span id="line-391"> if (tableExists(table, conn)) {</span>
<span class="source-line-no">392</span><span id="line-392"> tableList.add(table.getNameAsString());</span>
<span class="source-line-no">393</span><span id="line-393"> } else {</span>
<span class="source-line-no">394</span><span id="line-394"> LOG.warn("Table " + table + " does not exists. Skipping in WAL converter");</span>
<span class="source-line-no">395</span><span id="line-395"> }</span>
<span class="source-line-no">396</span><span id="line-396"> }</span>
<span class="source-line-no">397</span><span id="line-397"> walToHFiles(incrBackupFileList, tableList);</span>
<span class="source-line-no">398</span><span id="line-398"></span>
<span class="source-line-no">399</span><span id="line-399"> }</span>
<span class="source-line-no">400</span><span id="line-400"></span>
<span class="source-line-no">401</span><span id="line-401"> protected boolean tableExists(TableName table, Connection conn) throws IOException {</span>
<span class="source-line-no">402</span><span id="line-402"> try (Admin admin = conn.getAdmin()) {</span>
<span class="source-line-no">403</span><span id="line-403"> return admin.tableExists(table);</span>
<span class="source-line-no">404</span><span id="line-404"> }</span>
<span class="source-line-no">405</span><span id="line-405"> }</span>
<span class="source-line-no">406</span><span id="line-406"></span>
<span class="source-line-no">407</span><span id="line-407"> protected void walToHFiles(List&lt;String&gt; dirPaths, List&lt;String&gt; tableList) throws IOException {</span>
<span class="source-line-no">408</span><span id="line-408"> Tool player = new WALPlayer();</span>
<span class="source-line-no">409</span><span id="line-409"></span>
<span class="source-line-no">410</span><span id="line-410"> // Player reads all files in arbitrary directory structure and creates</span>
<span class="source-line-no">411</span><span id="line-411"> // a Map task for each file. We use ';' as separator</span>
<span class="source-line-no">412</span><span id="line-412"> // because WAL file names contains ','</span>
<span class="source-line-no">413</span><span id="line-413"> String dirs = StringUtils.join(dirPaths, ';');</span>
<span class="source-line-no">414</span><span id="line-414"> String jobname = "Incremental_Backup-" + backupId;</span>
<span class="source-line-no">415</span><span id="line-415"></span>
<span class="source-line-no">416</span><span id="line-416"> Path bulkOutputPath = getBulkOutputDir();</span>
<span class="source-line-no">417</span><span id="line-417"> conf.set(WALPlayer.BULK_OUTPUT_CONF_KEY, bulkOutputPath.toString());</span>
<span class="source-line-no">418</span><span id="line-418"> conf.set(WALPlayer.INPUT_FILES_SEPARATOR_KEY, ";");</span>
<span class="source-line-no">419</span><span id="line-419"> conf.setBoolean(WALPlayer.MULTI_TABLES_SUPPORT, true);</span>
<span class="source-line-no">420</span><span id="line-420"> conf.set(JOB_NAME_CONF_KEY, jobname);</span>
<span class="source-line-no">421</span><span id="line-421"></span>
<span class="source-line-no">422</span><span id="line-422"> boolean diskBasedSortingEnabledOriginalValue = HFileOutputFormat2.diskBasedSortingEnabled(conf);</span>
<span class="source-line-no">423</span><span id="line-423"> conf.setBoolean(HFileOutputFormat2.DISK_BASED_SORTING_ENABLED_KEY, true);</span>
<span class="source-line-no">424</span><span id="line-424"> String[] playerArgs = { dirs, StringUtils.join(tableList, ",") };</span>
<span class="source-line-no">425</span><span id="line-425"></span>
<span class="source-line-no">426</span><span id="line-426"> try {</span>
<span class="source-line-no">427</span><span id="line-427"> player.setConf(conf);</span>
<span class="source-line-no">428</span><span id="line-428"> int result = player.run(playerArgs);</span>
<span class="source-line-no">429</span><span id="line-429"> if (result != 0) {</span>
<span class="source-line-no">430</span><span id="line-430"> throw new IOException("WAL Player failed");</span>
<span class="source-line-no">431</span><span id="line-431"> }</span>
<span class="source-line-no">432</span><span id="line-432"> } catch (IOException e) {</span>
<span class="source-line-no">433</span><span id="line-433"> throw e;</span>
<span class="source-line-no">434</span><span id="line-434"> } catch (Exception ee) {</span>
<span class="source-line-no">435</span><span id="line-435"> throw new IOException("Can not convert from directory " + dirs</span>
<span class="source-line-no">436</span><span id="line-436"> + " (check Hadoop, HBase and WALPlayer M/R job logs) ", ee);</span>
<span class="source-line-no">437</span><span id="line-437"> } finally {</span>
<span class="source-line-no">438</span><span id="line-438"> conf.setBoolean(HFileOutputFormat2.DISK_BASED_SORTING_ENABLED_KEY,</span>
<span class="source-line-no">439</span><span id="line-439"> diskBasedSortingEnabledOriginalValue);</span>
<span class="source-line-no">440</span><span id="line-440"> conf.unset(WALPlayer.INPUT_FILES_SEPARATOR_KEY);</span>
<span class="source-line-no">441</span><span id="line-441"> conf.unset(JOB_NAME_CONF_KEY);</span>
<span class="source-line-no">442</span><span id="line-442"> }</span>
<span class="source-line-no">443</span><span id="line-443"> }</span>
<span class="source-line-no">444</span><span id="line-444"></span>
<span class="source-line-no">445</span><span id="line-445"> private void incrementalCopyBulkloadHFiles(FileSystem tgtFs, TableName tn) throws IOException {</span>
<span class="source-line-no">446</span><span id="line-446"> Path bulkOutDir = getBulkOutputDirForTable(tn);</span>
<span class="source-line-no">447</span><span id="line-447"></span>
<span class="source-line-no">448</span><span id="line-448"> if (tgtFs.exists(bulkOutDir)) {</span>
<span class="source-line-no">449</span><span id="line-449"> conf.setInt(MapReduceBackupCopyJob.NUMBER_OF_LEVELS_TO_PRESERVE_KEY, 2);</span>
<span class="source-line-no">450</span><span id="line-450"> Path tgtPath = getTargetDirForTable(tn);</span>
<span class="source-line-no">451</span><span id="line-451"> try {</span>
<span class="source-line-no">452</span><span id="line-452"> RemoteIterator&lt;LocatedFileStatus&gt; locatedFiles = tgtFs.listFiles(bulkOutDir, true);</span>
<span class="source-line-no">453</span><span id="line-453"> List&lt;String&gt; files = new ArrayList&lt;&gt;();</span>
<span class="source-line-no">454</span><span id="line-454"> while (locatedFiles.hasNext()) {</span>
<span class="source-line-no">455</span><span id="line-455"> LocatedFileStatus file = locatedFiles.next();</span>
<span class="source-line-no">456</span><span id="line-456"> if (file.isFile() &amp;&amp; HFile.isHFileFormat(tgtFs, file.getPath())) {</span>
<span class="source-line-no">457</span><span id="line-457"> files.add(file.getPath().toString());</span>
<span class="source-line-no">458</span><span id="line-458"> }</span>
<span class="source-line-no">459</span><span id="line-459"> }</span>
<span class="source-line-no">460</span><span id="line-460"> incrementalCopyHFiles(files.toArray(files.toArray(new String[0])), tgtPath.toString());</span>
<span class="source-line-no">461</span><span id="line-461"> } finally {</span>
<span class="source-line-no">462</span><span id="line-462"> conf.unset(MapReduceBackupCopyJob.NUMBER_OF_LEVELS_TO_PRESERVE_KEY);</span>
<span class="source-line-no">463</span><span id="line-463"> }</span>
<span class="source-line-no">464</span><span id="line-464"> }</span>
<span class="source-line-no">465</span><span id="line-465"> }</span>
<span class="source-line-no">466</span><span id="line-466"></span>
<span class="source-line-no">467</span><span id="line-467"> protected Path getBulkOutputDirForTable(TableName table) {</span>
<span class="source-line-no">468</span><span id="line-468"> Path tablePath = getBulkOutputDir();</span>
<span class="source-line-no">469</span><span id="line-469"> tablePath = new Path(tablePath, table.getNamespaceAsString());</span>
<span class="source-line-no">470</span><span id="line-470"> tablePath = new Path(tablePath, table.getQualifierAsString());</span>
<span class="source-line-no">471</span><span id="line-471"> return new Path(tablePath, "data");</span>
<span class="source-line-no">472</span><span id="line-472"> }</span>
<span class="source-line-no">473</span><span id="line-473"></span>
<span class="source-line-no">474</span><span id="line-474"> protected Path getBulkOutputDir() {</span>
<span class="source-line-no">475</span><span id="line-475"> String backupId = backupInfo.getBackupId();</span>
<span class="source-line-no">476</span><span id="line-476"> Path path = new Path(backupInfo.getBackupRootDir());</span>
<span class="source-line-no">477</span><span id="line-477"> path = new Path(path, ".tmp");</span>
<span class="source-line-no">478</span><span id="line-478"> path = new Path(path, backupId);</span>
<span class="source-line-no">479</span><span id="line-479"> return path;</span>
<span class="source-line-no">480</span><span id="line-480"> }</span>
<span class="source-line-no">481</span><span id="line-481"></span>
<span class="source-line-no">482</span><span id="line-482"> private Path getTargetDirForTable(TableName table) {</span>
<span class="source-line-no">483</span><span id="line-483"> Path path = new Path(backupInfo.getBackupRootDir() + Path.SEPARATOR + backupInfo.getBackupId());</span>
<span class="source-line-no">484</span><span id="line-484"> path = new Path(path, table.getNamespaceAsString());</span>
<span class="source-line-no">485</span><span id="line-485"> path = new Path(path, table.getQualifierAsString());</span>
<span class="source-line-no">486</span><span id="line-486"> return path;</span>
<span class="source-line-no">487</span><span id="line-487"> }</span>
<span class="source-line-no">488</span><span id="line-488"></span>
<span class="source-line-no">489</span><span id="line-489"> private void setupRegionLocator() throws IOException {</span>
<span class="source-line-no">490</span><span id="line-490"> Map&lt;TableName, String&gt; fullBackupIds = getFullBackupIds();</span>
<span class="source-line-no">491</span><span id="line-491"> try (BackupAdminImpl backupAdmin = new BackupAdminImpl(conn)) {</span>
<span class="source-line-no">492</span><span id="line-492"></span>
<span class="source-line-no">493</span><span id="line-493"> for (TableName tableName : backupInfo.getTables()) {</span>
<span class="source-line-no">494</span><span id="line-494"> String fullBackupId = fullBackupIds.get(tableName);</span>
<span class="source-line-no">495</span><span id="line-495"> BackupInfo fullBackupInfo = backupAdmin.getBackupInfo(fullBackupId);</span>
<span class="source-line-no">496</span><span id="line-496"> String snapshotName = fullBackupInfo.getSnapshotName(tableName);</span>
<span class="source-line-no">497</span><span id="line-497"> Path root = HBackupFileSystem.getTableBackupPath(tableName,</span>
<span class="source-line-no">498</span><span id="line-498"> new Path(fullBackupInfo.getBackupRootDir()), fullBackupId);</span>
<span class="source-line-no">499</span><span id="line-499"> String manifestDir =</span>
<span class="source-line-no">500</span><span id="line-500"> SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, root).toString();</span>
<span class="source-line-no">501</span><span id="line-501"> SnapshotRegionLocator.setSnapshotManifestDir(conf, manifestDir, tableName);</span>
<span class="source-line-no">502</span><span id="line-502"> }</span>
<span class="source-line-no">503</span><span id="line-503"> }</span>
<span class="source-line-no">504</span><span id="line-504"> }</span>
<span class="source-line-no">505</span><span id="line-505"></span>
<span class="source-line-no">506</span><span id="line-506"> private Map&lt;TableName, String&gt; getFullBackupIds() throws IOException {</span>
<span class="source-line-no">507</span><span id="line-507"> // Ancestors are stored from newest to oldest, so we can iterate backwards</span>
<span class="source-line-no">508</span><span id="line-508"> // in order to populate our backupId map with the most recent full backup</span>
<span class="source-line-no">509</span><span id="line-509"> // for a given table</span>
<span class="source-line-no">510</span><span id="line-510"> List&lt;BackupManifest.BackupImage&gt; images = getAncestors(backupInfo);</span>
<span class="source-line-no">511</span><span id="line-511"> Map&lt;TableName, String&gt; results = new HashMap&lt;&gt;();</span>
<span class="source-line-no">512</span><span id="line-512"> for (int i = images.size() - 1; i &gt;= 0; i--) {</span>
<span class="source-line-no">513</span><span id="line-513"> BackupManifest.BackupImage image = images.get(i);</span>
<span class="source-line-no">514</span><span id="line-514"> if (image.getType() != BackupType.FULL) {</span>
<span class="source-line-no">515</span><span id="line-515"> continue;</span>
<span class="source-line-no">516</span><span id="line-516"> }</span>
<span class="source-line-no">517</span><span id="line-517"></span>
<span class="source-line-no">518</span><span id="line-518"> for (TableName tn : image.getTableNames()) {</span>
<span class="source-line-no">519</span><span id="line-519"> results.put(tn, image.getBackupId());</span>
<span class="source-line-no">520</span><span id="line-520"> }</span>
<span class="source-line-no">521</span><span id="line-521"> }</span>
<span class="source-line-no">522</span><span id="line-522"> return results;</span>
<span class="source-line-no">523</span><span id="line-523"> }</span>
<span class="source-line-no">524</span><span id="line-524"></span>
<span class="source-line-no">525</span><span id="line-525"> /**</span>
<span class="source-line-no">526</span><span id="line-526"> * Verifies that the current table descriptor CFs matches the descriptor CFs of the last full</span>
<span class="source-line-no">527</span><span id="line-527"> * backup for the tables. This ensures CF compatibility across incremental backups. If a mismatch</span>
<span class="source-line-no">528</span><span id="line-528"> * is detected, a full table backup should be taken, rather than an incremental one</span>
<span class="source-line-no">529</span><span id="line-529"> */</span>
<span class="source-line-no">530</span><span id="line-530"> private void verifyCfCompatibility(Set&lt;TableName&gt; tables,</span>
<span class="source-line-no">531</span><span id="line-531"> Map&lt;TableName, String&gt; tablesToFullBackupId) throws IOException, ColumnFamilyMismatchException {</span>
<span class="source-line-no">532</span><span id="line-532"> ColumnFamilyMismatchException.ColumnFamilyMismatchExceptionBuilder exBuilder =</span>
<span class="source-line-no">533</span><span id="line-533"> ColumnFamilyMismatchException.newBuilder();</span>
<span class="source-line-no">534</span><span id="line-534"> try (Admin admin = conn.getAdmin(); BackupAdminImpl backupAdmin = new BackupAdminImpl(conn)) {</span>
<span class="source-line-no">535</span><span id="line-535"> for (TableName tn : tables) {</span>
<span class="source-line-no">536</span><span id="line-536"> String backupId = tablesToFullBackupId.get(tn);</span>
<span class="source-line-no">537</span><span id="line-537"> BackupInfo fullBackupInfo = backupAdmin.getBackupInfo(backupId);</span>
<span class="source-line-no">538</span><span id="line-538"></span>
<span class="source-line-no">539</span><span id="line-539"> ColumnFamilyDescriptor[] currentCfs = admin.getDescriptor(tn).getColumnFamilies();</span>
<span class="source-line-no">540</span><span id="line-540"> String snapshotName = fullBackupInfo.getSnapshotName(tn);</span>
<span class="source-line-no">541</span><span id="line-541"> Path root = HBackupFileSystem.getTableBackupPath(tn,</span>
<span class="source-line-no">542</span><span id="line-542"> new Path(fullBackupInfo.getBackupRootDir()), fullBackupInfo.getBackupId());</span>
<span class="source-line-no">543</span><span id="line-543"> Path manifestDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, root);</span>
<span class="source-line-no">544</span><span id="line-544"></span>
<span class="source-line-no">545</span><span id="line-545"> FileSystem fs;</span>
<span class="source-line-no">546</span><span id="line-546"> try {</span>
<span class="source-line-no">547</span><span id="line-547"> fs = FileSystem.get(new URI(fullBackupInfo.getBackupRootDir()), conf);</span>
<span class="source-line-no">548</span><span id="line-548"> } catch (URISyntaxException e) {</span>
<span class="source-line-no">549</span><span id="line-549"> throw new IOException("Unable to get fs for backup " + fullBackupInfo.getBackupId(), e);</span>
<span class="source-line-no">550</span><span id="line-550"> }</span>
<span class="source-line-no">551</span><span id="line-551"></span>
<span class="source-line-no">552</span><span id="line-552"> SnapshotProtos.SnapshotDescription snapshotDescription =</span>
<span class="source-line-no">553</span><span id="line-553"> SnapshotDescriptionUtils.readSnapshotInfo(fs, manifestDir);</span>
<span class="source-line-no">554</span><span id="line-554"> SnapshotManifest manifest =</span>
<span class="source-line-no">555</span><span id="line-555"> SnapshotManifest.open(conf, fs, manifestDir, snapshotDescription);</span>
<span class="source-line-no">556</span><span id="line-556"> if (</span>
<span class="source-line-no">557</span><span id="line-557"> SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),</span>
<span class="source-line-no">558</span><span id="line-558"> snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime())</span>
<span class="source-line-no">559</span><span id="line-559"> ) {</span>
<span class="source-line-no">560</span><span id="line-560"> throw new SnapshotTTLExpiredException(</span>
<span class="source-line-no">561</span><span id="line-561"> ProtobufUtil.createSnapshotDesc(snapshotDescription));</span>
<span class="source-line-no">562</span><span id="line-562"> }</span>
<span class="source-line-no">563</span><span id="line-563"></span>
<span class="source-line-no">564</span><span id="line-564"> ColumnFamilyDescriptor[] backupCfs = manifest.getTableDescriptor().getColumnFamilies();</span>
<span class="source-line-no">565</span><span id="line-565"> if (!areCfsCompatible(currentCfs, backupCfs)) {</span>
<span class="source-line-no">566</span><span id="line-566"> exBuilder.addMismatchedTable(tn, currentCfs, backupCfs);</span>
<span class="source-line-no">567</span><span id="line-567"> }</span>
<span class="source-line-no">568</span><span id="line-568"> }</span>
<span class="source-line-no">569</span><span id="line-569"> }</span>
<span class="source-line-no">570</span><span id="line-570"></span>
<span class="source-line-no">571</span><span id="line-571"> ColumnFamilyMismatchException ex = exBuilder.build();</span>
<span class="source-line-no">572</span><span id="line-572"> if (!ex.getMismatchedTables().isEmpty()) {</span>
<span class="source-line-no">573</span><span id="line-573"> throw ex;</span>
<span class="source-line-no">574</span><span id="line-574"> }</span>
<span class="source-line-no">575</span><span id="line-575"> }</span>
<span class="source-line-no">576</span><span id="line-576"></span>
<span class="source-line-no">577</span><span id="line-577"> private static boolean areCfsCompatible(ColumnFamilyDescriptor[] currentCfs,</span>
<span class="source-line-no">578</span><span id="line-578"> ColumnFamilyDescriptor[] backupCfs) {</span>
<span class="source-line-no">579</span><span id="line-579"> if (currentCfs.length != backupCfs.length) {</span>
<span class="source-line-no">580</span><span id="line-580"> return false;</span>
<span class="source-line-no">581</span><span id="line-581"> }</span>
<span class="source-line-no">582</span><span id="line-582"></span>
<span class="source-line-no">583</span><span id="line-583"> for (int i = 0; i &lt; backupCfs.length; i++) {</span>
<span class="source-line-no">584</span><span id="line-584"> String currentCf = currentCfs[i].getNameAsString();</span>
<span class="source-line-no">585</span><span id="line-585"> String backupCf = backupCfs[i].getNameAsString();</span>
<span class="source-line-no">586</span><span id="line-586"></span>
<span class="source-line-no">587</span><span id="line-587"> if (!currentCf.equals(backupCf)) {</span>
<span class="source-line-no">588</span><span id="line-588"> return false;</span>
<span class="source-line-no">589</span><span id="line-589"> }</span>
<span class="source-line-no">590</span><span id="line-590"> }</span>
<span class="source-line-no">591</span><span id="line-591"></span>
<span class="source-line-no">592</span><span id="line-592"> return true;</span>
<span class="source-line-no">593</span><span id="line-593"> }</span>
<span class="source-line-no">594</span><span id="line-594">}</span>
</pre>
</div>
</main>
</body>
</html>