| </pre><pre class="rust"><code><span class="doccomment">//! This module implements the process of gradient boosting decision tree |
| //! algorithm. This module depends on the following module: |
| //! |
| //! 1. [gbdt::config::Config](../config/): [Config](../config/struct.Config.html) is needed to configure the gbdt algorithm. |
| //! |
| //! 2. [gbdt::decision_tree](../decision_tree/): [DecisionTree](../decision_tree/struct.DecisionTree.html) is used |
| //! for training and predicting. |
| //! |
| //! 3. [rand](https://docs.rs/rand/0.6.1/rand/): This standard module is used to randomly select the data or |
| //! features used in a single iteration of training if the |
| //! [data_sample_ratio](../config/struct.Config.html#structfield.data_sample_ratio) or |
| //! [feature_sample_ratio](../config/struct.Config.html#structfield.feature_sample_ratio) is less than 1.0 . |
| //! |
| //! # Example |
| //! ```rust |
| //! use gbdt::config::Config; |
| //! use gbdt::gradient_boost::GBDT; |
| //! use gbdt::decision_tree::{Data, DataVec}; |
| //! |
| //! // set config for algorithm |
| //! let mut cfg = Config::new(); |
| //! cfg.set_feature_size(3); |
| //! cfg.set_max_depth(2); |
| //! cfg.set_min_leaf_size(1); |
| //! cfg.set_loss("SquaredError"); |
| //! cfg.set_iterations(2); |
| //! |
| //! // initialize GBDT algorithm |
| //! let mut gbdt = GBDT::new(&cfg); |
| //! |
| //! // setup training data |
| //! let data1 = Data::new_training_data ( |
| //! vec![1.0, 2.0, 3.0], |
| //! 1.0, |
| //! 1.0, |
| //! None |
| //! ); |
| //! let data2 = Data::new_training_data ( |
| //! vec![1.1, 2.1, 3.1], |
| //! 1.0, |
| //! 1.0, |
| //! None |
| //! ); |
| //! let data3 = Data::new_training_data ( |
| //! vec![2.0, 2.0, 1.0], |
| //! 1.0, |
| //! 2.0, |
| //! None |
| //! ); |
| //! let data4 = Data::new_training_data ( |
| //! vec![2.0, 2.3, 1.2], |
| //! 1.0, |
| //! 0.0, |
| //! None |
| //! ); |
| //! |
| //! let mut training_data: DataVec = Vec::new(); |
| //! training_data.push(data1.clone()); |
| //! training_data.push(data2.clone()); |
| //! training_data.push(data3.clone()); |
| //! training_data.push(data4.clone()); |
| //! |
| //! // train the decision trees. |
| //! gbdt.fit(&mut training_data); |
| //! |
| //! // setup the test data |
| //! |
| //! let mut test_data: DataVec = Vec::new(); |
| //! test_data.push(data1.clone()); |
| //! test_data.push(data2.clone()); |
| //! test_data.push(Data::new_test_data( |
| //! vec![2.0, 2.0, 1.0], |
| //! None)); |
| //! test_data.push(Data::new_test_data( |
| //! vec![2.0, 2.3, 1.2], |
| //! None)); |
| //! |
| //! println!("{:?}", gbdt.predict(&test_data)); |
| //! |
| //! // output: |
| //! // [1.0, 1.0, 2.0, 0.0] |
| //! ``` |
| |
| </span><span class="attribute">#[cfg(all(feature = <span class="string">"mesalock_sgx"</span>, not(target_env = <span class="string">"sgx"</span>)))] |
| </span><span class="kw">use </span>std::prelude::v1::<span class="kw-2">*</span>; |
| |
| <span class="kw">use </span><span class="kw">crate</span>::config::{Config, Loss}; |
| <span class="kw">use </span><span class="kw">crate</span>::decision_tree::DecisionTree; |
| <span class="attribute">#[cfg(feature = <span class="string">"enable_training"</span>)] |
| </span><span class="kw">use </span><span class="kw">crate</span>::decision_tree::TrainingCache; |
| <span class="kw">use </span><span class="kw">crate</span>::decision_tree::{DataVec, PredVec, ValueType, VALUE_TYPE_MIN, VALUE_TYPE_UNKNOWN}; |
| <span class="kw">use </span><span class="kw">crate</span>::errors::Result; |
| <span class="attribute">#[cfg(feature = <span class="string">"enable_training"</span>)] |
| </span><span class="kw">use </span><span class="kw">crate</span>::fitness::{label_average, logit_loss_gradient, weighted_label_median, AUC, MAE, RMSE}; |
| <span class="attribute">#[cfg(feature = <span class="string">"enable_training"</span>)] |
| </span><span class="kw">use </span>rand::prelude::SliceRandom; |
| <span class="attribute">#[cfg(feature = <span class="string">"enable_training"</span>)] |
| </span><span class="kw">use </span>rand::thread_rng; |
| |
| <span class="attribute">#[cfg(not(target_vendor = <span class="string">"teaclave"</span>))] |
| </span><span class="kw">use </span>std::fs::File; |
| |
| <span class="attribute">#[cfg(target_vendor = <span class="string">"teaclave"</span>)] |
| </span><span class="kw">use </span>std::untrusted::fs::File; |
| |
| <span class="kw">use </span>std::io::prelude::<span class="kw-2">*</span>; |
| <span class="kw">use </span>std::io::BufReader; |
| |
| <span class="kw">use </span>serde_derive::{Deserialize, Serialize}; |
| |
| <span class="attribute">#[cfg(feature = <span class="string">"profiling"</span>)] |
| </span><span class="kw">use </span>time::PreciseTime; |
| |
| <span class="doccomment">/// The gradient boosting decision tree. |
| </span><span class="attribute">#[derive(Default, Serialize, Deserialize)] |
| </span><span class="kw">pub struct </span>GBDT { |
| <span class="doccomment">/// The config of gbdt. See [gbdt::config](../config/) for detail. |
| </span>conf: Config, |
| <span class="doccomment">/// The trained decision trees. |
| </span>trees: Vec<DecisionTree>, |
| <span class="doccomment">/// The bias estimated. |
| </span>bias: ValueType, |
| } |
| |
| <span class="kw">impl </span>GBDT { |
| <span class="doccomment">/// Return a new gbdt with manually set config. |
| /// |
| /// # Example |
| /// ```rust |
| /// use gbdt::config::Config; |
| /// use gbdt::gradient_boost::GBDT; |
| /// |
| /// // set config for algorithm |
| /// let mut cfg = Config::new(); |
| /// cfg.set_feature_size(3); |
| /// cfg.set_max_depth(2); |
| /// cfg.set_min_leaf_size(1); |
| /// cfg.set_loss("SquaredError"); |
| /// cfg.set_iterations(2); |
| /// |
| /// // initialize GBDT algorithm |
| /// let mut gbdt = GBDT::new(&cfg); |
| /// ``` |
| </span><span class="kw">pub fn </span>new(conf: <span class="kw-2">&</span>Config) -> GBDT { |
| GBDT { |
| conf: conf.clone(), |
| trees: Vec::new(), |
| bias: <span class="number">0.0</span>, |
| } |
| } |
| |
| <span class="doccomment">/// Return true if the data in the given data vector are all valid. In other case |
| /// returns false. |
| /// |
| /// We simply check whether the length of feature vector in each data |
| /// equals to the specified feature size in config. |
| </span><span class="attribute">#[cfg(feature = <span class="string">"enable_training"</span>)] |
| </span><span class="kw">fn </span>check_valid_data(<span class="kw-2">&</span><span class="self">self</span>, dv: <span class="kw-2">&</span>DataVec) -> bool { |
| dv.iter().all(|x| x.feature.len() == <span class="self">self</span>.conf.feature_size) |
| } |
| |
| <span class="doccomment">/// If initial_guess_enabled is set false in gbdt config, this function will calculate |
| /// bias for initial guess based on train data. Different methods will be used according |
| /// to different loss type. This is a private method and should not be called manually. |
| /// |
| /// # Panic |
| /// If specified length is greater than the length of data vector, it will panic. |
| /// |
| /// If there is invalid data that will confuse the training process, it will panic. |
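    ///
    /// For the `LogLikelyhood` loss (whose labels are expected to be -1.0 and 1.0),
    /// the bias below is the prior half log-odds of the positive class: with
    /// `v = label_average(dv, len)`, `bias = ln((1.0 + v) / (1.0 - v)) / 2.0`, which
    /// is exactly the expression in the corresponding match arm.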
| </span><span class="attribute">#[cfg(feature = <span class="string">"enable_training"</span>)] |
| </span><span class="kw">fn </span>init(<span class="kw-2">&mut </span><span class="self">self</span>, len: usize, dv: <span class="kw-2">&</span>DataVec) { |
| <span class="macro">assert!</span>(dv.len() >= len); |
| |
| <span class="kw">if </span>!<span class="self">self</span>.check_valid_data(<span class="kw-2">&</span>dv) { |
| <span class="macro">panic!</span>(<span class="string">"There are invalid data in data vector, check your data please."</span>); |
| } |
| |
| <span class="kw">if </span><span class="self">self</span>.conf.initial_guess_enabled { |
| <span class="kw">return</span>; |
| } |
| |
| <span class="self">self</span>.bias = <span class="kw">match </span><span class="self">self</span>.conf.loss { |
| Loss::SquaredError => label_average(dv, len), |
| Loss::LogLikelyhood => { |
| <span class="kw">let </span>v: ValueType = label_average(dv, len); |
| ((<span class="number">1.0 </span>+ v) / (<span class="number">1.0 </span>- v)).ln() / <span class="number">2.0 |
| </span>} |
| Loss::LAD => weighted_label_median(dv, len), |
| <span class="kw">_ </span>=> label_average(dv, len), |
| } |
| } |
| |
| <span class="doccomment">/// Fit the train data. |
| /// |
| /// First, initialize and configure decision trees. Then train the model with certain |
| /// iterations set by config. |
| /// |
| /// # Example |
| /// ```rust |
| /// use gbdt::config::Config; |
| /// use gbdt::gradient_boost::GBDT; |
| /// use gbdt::decision_tree::{Data, DataVec, PredVec, ValueType}; |
| /// |
| /// // set config for algorithm |
| /// let mut cfg = Config::new(); |
| /// cfg.set_feature_size(3); |
| /// cfg.set_max_depth(2); |
| /// cfg.set_min_leaf_size(1); |
| /// cfg.set_loss("SquaredError"); |
| /// cfg.set_iterations(2); |
| /// |
| /// // initialize GBDT algorithm |
| /// let mut gbdt = GBDT::new(&cfg); |
| /// |
| /// // setup training data |
| /// let data1 = Data::new_training_data ( |
| /// vec![1.0, 2.0, 3.0], |
| /// 1.0, |
| /// 1.0, |
| /// None |
| /// ); |
| /// let data2 = Data::new_training_data ( |
| /// vec![1.1, 2.1, 3.1], |
| /// 1.0, |
| /// 1.0, |
| /// None |
| /// ); |
| /// let data3 = Data::new_training_data ( |
| /// vec![2.0, 2.0, 1.0], |
| /// 1.0, |
| /// 2.0, |
| /// None |
| /// ); |
| /// let data4 = Data::new_training_data ( |
| /// vec![2.0, 2.3, 1.2], |
| /// 1.0, |
| /// 0.0, |
| /// None |
| /// ); |
| /// |
| /// let mut training_data: DataVec = Vec::new(); |
| /// training_data.push(data1.clone()); |
| /// training_data.push(data2.clone()); |
| /// training_data.push(data3.clone()); |
| /// training_data.push(data4.clone()); |
| /// |
| /// // train the decision trees. |
| /// gbdt.fit(&mut training_data); |
| /// ``` |
| </span><span class="attribute">#[cfg(feature = <span class="string">"enable_training"</span>)] |
| </span><span class="kw">pub fn </span>fit(<span class="kw-2">&mut </span><span class="self">self</span>, train_data: <span class="kw-2">&mut </span>DataVec) { |
| <span class="self">self</span>.trees = Vec::with_capacity(<span class="self">self</span>.conf.iterations); |
| <span class="comment">// initialize each decision tree |
| </span><span class="kw">for </span>i <span class="kw">in </span><span class="number">0</span>..<span class="self">self</span>.conf.iterations { |
| <span class="self">self</span>.trees.push(DecisionTree::new()); |
| <span class="self">self</span>.trees[i].set_feature_size(<span class="self">self</span>.conf.feature_size); |
| <span class="self">self</span>.trees[i].set_max_depth(<span class="self">self</span>.conf.max_depth); |
| <span class="self">self</span>.trees[i].set_min_leaf_size(<span class="self">self</span>.conf.min_leaf_size); |
| <span class="self">self</span>.trees[i].set_feature_sample_ratio(<span class="self">self</span>.conf.feature_sample_ratio); |
| <span class="self">self</span>.trees[i].set_loss(<span class="self">self</span>.conf.loss.clone()); |
| } |
| |
| <span class="comment">// number of samples for training |
| </span><span class="kw">let </span>nr_samples: usize = <span class="kw">if </span><span class="self">self</span>.conf.data_sample_ratio < <span class="number">1.0 </span>{ |
| ((train_data.len() <span class="kw">as </span>f64) * <span class="self">self</span>.conf.data_sample_ratio) <span class="kw">as </span>usize |
| } <span class="kw">else </span>{ |
| train_data.len() |
| }; |
| |
| <span class="self">self</span>.init(train_data.len(), <span class="kw-2">&</span>train_data); |
| |
| <span class="kw">let </span><span class="kw-2">mut </span>rng = thread_rng(); |
| <span class="comment">// initialize the predicted_cache, which records the predictions for training data |
| </span><span class="kw">let </span><span class="kw-2">mut </span>predicted_cache: PredVec = <span class="self">self</span>.predict_n(train_data, <span class="number">0</span>, <span class="number">0</span>, train_data.len()); |
| |
| <span class="attribute">#[cfg(feature = <span class="string">"profiling"</span>)] |
| </span><span class="kw">let </span>t1 = PreciseTime::now(); |
| |
| <span class="comment">// allocat the TrainingCache |
| </span><span class="kw">let </span><span class="kw-2">mut </span>cache = TrainingCache::get_cache( |
| <span class="self">self</span>.conf.feature_size, |
| <span class="kw-2">&</span>train_data, |
| <span class="self">self</span>.conf.training_optimization_level, |
| ); |
| |
| <span class="attribute">#[cfg(feature = <span class="string">"profiling"</span>)] |
| </span><span class="kw">let </span>t2 = PreciseTime::now(); |
| |
| <span class="attribute">#[cfg(feature = <span class="string">"profiling"</span>)] |
| </span><span class="macro">println!</span>(<span class="string">"cache {}"</span>, t1.to(t2)); |
| |
| <span class="kw">for </span>i <span class="kw">in </span><span class="number">0</span>..<span class="self">self</span>.conf.iterations { |
| <span class="attribute">#[cfg(feature = <span class="string">"profiling"</span>)] |
| </span><span class="kw">let </span>t1 = PreciseTime::now(); |
| |
| <span class="kw">let </span><span class="kw-2">mut </span>samples: Vec<usize> = (<span class="number">0</span>..train_data.len()).collect(); |
| <span class="comment">// randomly select some data for training |
| </span><span class="kw">let </span>(subset, remaining) = <span class="kw">if </span>nr_samples < train_data.len() { |
| samples.shuffle(<span class="kw-2">&mut </span>rng); |
| <span class="kw">let </span>(left, right) = samples.split_at(nr_samples); |
| <span class="kw">let </span><span class="kw-2">mut </span>left = left.to_vec(); |
| <span class="kw">let </span><span class="kw-2">mut </span>right = right.to_vec(); |
| left.sort(); |
| right.sort(); |
| (left, right) |
| } <span class="kw">else </span>{ |
| (samples, Vec::new()) |
| }; |
| |
| <span class="comment">// Update the target for training |
| </span><span class="kw">match </span><span class="self">self</span>.conf.loss { |
| Loss::SquaredError => { |
| <span class="self">self</span>.square_loss_process(train_data, train_data.len(), <span class="kw-2">&</span>predicted_cache) |
| } |
| Loss::LogLikelyhood => { |
| <span class="self">self</span>.log_loss_process(train_data, train_data.len(), <span class="kw-2">&</span>predicted_cache) |
| } |
| Loss::LAD => <span class="self">self</span>.lad_loss_process(train_data, train_data.len(), <span class="kw-2">&</span>predicted_cache), |
| |
| <span class="kw">_ </span>=> <span class="self">self</span>.square_loss_process(train_data, train_data.len(), <span class="kw-2">&</span>predicted_cache), |
| } |
| <span class="comment">// train a new decision tree |
| </span><span class="self">self</span>.trees[i].fit_n(train_data, <span class="kw-2">&</span>subset, <span class="kw-2">&mut </span>cache); |
| |
| <span class="comment">// update the predicted_cache for the data in the `subset` |
| </span><span class="kw">let </span>train_preds = cache.get_preds(); |
| <span class="kw">for </span>index <span class="kw">in </span>subset.iter() { |
| predicted_cache[<span class="kw-2">*</span>index] += train_preds[<span class="kw-2">*</span>index] * <span class="self">self</span>.conf.shrinkage; |
| } |
| <span class="comment">// update the predicted_cache for the data in the `remaining` |
| </span><span class="kw">let </span>predicted_tmp = <span class="self">self</span>.trees[i].predict_n(train_data, <span class="kw-2">&</span>remaining); |
| <span class="kw">for </span>index <span class="kw">in </span>remaining.iter() { |
| predicted_cache[<span class="kw-2">*</span>index] += predicted_tmp[<span class="kw-2">*</span>index] * <span class="self">self</span>.conf.shrinkage; |
| } |
| |
| <span class="comment">//output elapsed time |
| </span><span class="attribute">#[cfg(feature = <span class="string">"profiling"</span>)] |
| </span><span class="kw">let </span>t2 = PreciseTime::now(); |
| <span class="attribute">#[cfg(feature = <span class="string">"profiling"</span>)] |
| </span><span class="macro">println!</span>( |
| <span class="string">"iteration {} {} nodes: {}"</span>, |
| i, |
| t1.to(t2), |
| <span class="self">self</span>.trees[i].len() |
| ); |
| } |
| } |
| |
| <span class="doccomment">/// Predict the first `n` data in data vector with the [`begin`, `begin`+iters) trees. |
| /// |
| /// The output will be a vector, having same size as the `test_data`. The first n elements are the predicted values, the others are `VALUE_TYPE_UNKNOWN` |
| /// |
| /// Note that the result will not be normalized no matter what loss type is used. |
| /// |
| /// # Panic |
| /// If n is greater than the length of test data vector, it will panic. |
| /// |
| /// If the iterations is greater than the number of trees that have been trained, it will panic. |
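    ///
    /// For example (illustrative only, since this method is private):
    /// `self.predict_n(dv, 2, 3, dv.len())` starts each prediction from the bias or the
    /// per-sample initial guess and accumulates the shrinkage-scaled outputs of trees
    /// 2, 3 and 4 for every element of `dv`.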
| </span><span class="kw">fn </span>predict_n(<span class="kw-2">&</span><span class="self">self</span>, test_data: <span class="kw-2">&</span>DataVec, begin: usize, iters: usize, n: usize) -> PredVec { |
| <span class="macro">assert!</span>((begin + iters) <= <span class="self">self</span>.trees.len()); |
| <span class="macro">assert!</span>(n <= test_data.len()); |
| |
| <span class="kw">if </span><span class="self">self</span>.trees.is_empty() { |
| <span class="kw">return </span><span class="macro">vec!</span>[VALUE_TYPE_UNKNOWN; test_data.len()]; |
| } |
| |
| <span class="comment">// initialize the vector with bias/initial_guess |
| </span><span class="kw">let </span><span class="kw-2">mut </span>predicted: PredVec = <span class="kw">if </span>!<span class="self">self</span>.conf.initial_guess_enabled { |
| <span class="macro">vec!</span>[<span class="self">self</span>.bias; n] |
| } <span class="kw">else </span>{ |
| test_data.iter().take(n).map(|x| x.initial_guess).collect() |
| }; |
| |
| <span class="comment">// inference the data with individual decision tree. |
| </span><span class="kw">let </span>subset: Vec<usize> = (<span class="number">0</span>..n).collect(); |
| <span class="kw">for </span>i <span class="kw">in </span>begin..(iters + begin) { |
| <span class="kw">let </span>v: PredVec = <span class="self">self</span>.trees[i].predict_n(<span class="kw-2">&</span>test_data, <span class="kw-2">&</span>subset); |
| <span class="kw">for </span>(e, v) <span class="kw">in </span>predicted.iter_mut().take(n).zip(v.iter()) { |
| <span class="kw-2">*</span>e += <span class="self">self</span>.conf.shrinkage * v; |
| } |
| } |
| predicted |
| } |
| |
| <span class="doccomment">/// Predict the given data. |
| /// |
| /// Note that for log likelyhood loss type, the predicted value will be |
| /// normalized between 0 and 1, which is the possibility of label 1 |
| /// |
| /// # Example |
| /// ```rust |
| /// use gbdt::config::Config; |
| /// use gbdt::gradient_boost::GBDT; |
| /// use gbdt::decision_tree::{Data, DataVec, PredVec, ValueType}; |
| /// |
| /// // set config for algorithm |
| /// let mut cfg = Config::new(); |
| /// cfg.set_feature_size(3); |
| /// cfg.set_max_depth(2); |
| /// cfg.set_min_leaf_size(1); |
| /// cfg.set_loss("SquaredError"); |
| /// cfg.set_iterations(2); |
| /// |
| /// // initialize GBDT algorithm |
| /// let mut gbdt = GBDT::new(&cfg); |
| /// |
| /// // setup training data |
| /// let data1 = Data::new_training_data ( |
| /// vec![1.0, 2.0, 3.0], |
| /// 1.0, |
| /// 1.0, |
| /// None |
| /// ); |
| /// let data2 = Data::new_training_data ( |
| /// vec![1.1, 2.1, 3.1], |
| /// 1.0, |
| /// 1.0, |
| /// None |
| /// ); |
| /// let data3 = Data::new_training_data ( |
| /// vec![2.0, 2.0, 1.0], |
| /// 1.0, |
| /// 2.0, |
| /// None |
| /// ); |
| /// let data4 = Data::new_training_data ( |
| /// vec![2.0, 2.3, 1.2], |
| /// 1.0, |
| /// 0.0, |
| /// None |
| /// ); |
| /// |
| /// let mut training_data: DataVec = Vec::new(); |
| /// training_data.push(data1.clone()); |
| /// training_data.push(data2.clone()); |
| /// training_data.push(data3.clone()); |
| /// training_data.push(data4.clone()); |
| /// |
| /// // train the decision trees. |
| /// gbdt.fit(&mut training_data); |
| /// |
| /// // setup the test data |
| /// |
| /// let mut test_data: DataVec = Vec::new(); |
| /// test_data.push(data1.clone()); |
| /// test_data.push(data2.clone()); |
| /// test_data.push(data3.clone()); |
| /// test_data.push(data4.clone()); |
| /// |
| /// println!("{:?}", gbdt.predict(&test_data)); |
| /// ``` |
| /// |
| /// # Panic |
| /// If the training process is not completed, thus, the number of trees that have been |
| /// is less than the iteration configuration in `self.conf`, it will panic. |
| </span><span class="kw">pub fn </span>predict(<span class="kw-2">&</span><span class="self">self</span>, test_data: <span class="kw-2">&</span>DataVec) -> PredVec { |
| <span class="macro">assert_eq!</span>(<span class="self">self</span>.conf.iterations, <span class="self">self</span>.trees.len()); |
| <span class="kw">let </span>predicted = <span class="self">self</span>.predict_n(test_data, <span class="number">0</span>, <span class="self">self</span>.conf.iterations, test_data.len()); |
| |
| <span class="kw">match </span><span class="self">self</span>.conf.loss { |
| Loss::LogLikelyhood => predicted |
| .iter() |
| .map(|x| { |
| <span class="comment">//if (1.0 / (1.0 + ((-2.0 * x).exp()))) >= 0.5 { |
| // 1.0 |
| //} else { |
| // -1.0 |
| //} |
| </span><span class="number">1.0 </span>/ (<span class="number">1.0 </span>+ ((-<span class="number">2.0 </span>* x).exp())) |
| }) |
| .collect(), |
| Loss::BinaryLogistic | Loss::RegLogistic => { |
| predicted.iter().map(|x| <span class="number">1.0 </span>/ (<span class="number">1.0 </span>+ (-x).exp())).collect() |
| } |
| <span class="kw">_ </span>=> predicted, |
| } |
| } |
| |
| <span class="doccomment">/// Predict multi class data and return the probabilities for each class. The loss type should be "multi:softmax" or "multi:softprob" |
| /// |
| /// test_data: the test set |
| /// |
| /// class_num: the number of class |
| /// |
| /// output: the predicted class label, the predicted possiblity for each class |
| /// |
| /// # Example |
| /// |
| /// ```rust |
| /// use gbdt::gradient_boost::GBDT; |
| /// use gbdt::input::{load, InputFormat}; |
| /// use gbdt::decision_tree::DataVec; |
| /// let gbdt = |
| /// GBDT::from_xgoost_dump("xgb-data/xgb_multi_softmax/gbdt.model", "multi:softmax").unwrap(); |
| /// let test_file = "xgb-data/xgb_multi_softmax/dermatology.data.test"; |
| /// let mut fmt = InputFormat::csv_format(); |
| /// fmt.set_label_index(34); |
| /// let test_data: DataVec = load(test_file, fmt).unwrap(); |
| /// let (labels, probs) = gbdt.predict_multiclass(&test_data, 6); |
| /// ``` |
| </span><span class="kw">pub fn </span>predict_multiclass( |
| <span class="kw-2">&</span><span class="self">self</span>, |
| test_data: <span class="kw-2">&</span>DataVec, |
| class_num: usize, |
| ) -> (Vec<usize>, Vec<Vec<ValueType>>) { |
| <span class="macro">assert_eq!</span>(<span class="self">self</span>.conf.iterations, <span class="self">self</span>.trees.len()); |
| <span class="macro">assert_eq!</span>(<span class="self">self</span>.trees.len() % class_num, <span class="number">0</span>); |
| |
| <span class="comment">// this api is used for xgboost's model, so shrinkage is 1.0 |
| // and config.initial_guess is false |
| </span><span class="kw">let </span><span class="kw-2">mut </span>probs: Vec<Vec<ValueType>> = Vec::with_capacity(test_data.len()); |
| <span class="comment">// initialize the vector with bias value |
| </span><span class="kw">for </span>_index <span class="kw">in </span><span class="number">0</span>..test_data.len() { |
| probs.push(<span class="macro">vec!</span>[<span class="self">self</span>.bias; class_num]); |
| } |
| |
| <span class="comment">// compute the raw predicted values for each class |
| </span><span class="kw">for </span>(index, tree) <span class="kw">in </span><span class="self">self</span>.trees.iter().enumerate() { |
| <span class="kw">let </span>preds = tree.predict(test_data); |
| <span class="kw">for </span>(x, y) <span class="kw">in </span>probs.iter_mut().zip(preds.iter()) { |
| x[index % class_num] += y; |
| } |
| } |
| <span class="kw">let </span><span class="kw-2">mut </span>labels = <span class="macro">vec!</span>[<span class="number">0</span>; test_data.len()]; |
| <span class="comment">// normalize the predicted probilities and compute the label |
| </span><span class="kw">for </span>(elem_index, elem) <span class="kw">in </span>probs.iter_mut().enumerate() { |
| <span class="kw">let </span><span class="kw-2">mut </span>sum: ValueType = <span class="number">0.0</span>; |
| <span class="kw">let </span><span class="kw-2">mut </span>max_value = VALUE_TYPE_MIN; |
| <span class="kw">let </span><span class="kw-2">mut </span>max_index = <span class="number">0</span>; |
| <span class="kw">let </span><span class="kw-2">mut </span>prob_vec = <span class="macro">vec!</span>[<span class="number">0.0</span>; class_num]; |
| <span class="kw">for </span>(index, item) <span class="kw">in </span>elem.iter().enumerate() { |
| <span class="kw">let </span>v = item.exp(); |
| prob_vec[index] = v; |
| sum += v; |
| <span class="kw">if </span>v > max_value { |
| max_index = index; |
| max_value = v; |
| } |
| } |
| <span class="kw">for </span>item <span class="kw">in </span>prob_vec.iter_mut() { |
| <span class="kw-2">*</span>item /= sum; |
| } |
| <span class="kw-2">*</span>elem = prob_vec; |
| labels[elem_index] = max_index; |
| } |
| (labels, probs) |
| } |
| |
| <span class="doccomment">/// Print the tress for debug |
| /// |
| /// # Example |
| /// ```rust |
| /// use gbdt::config::Config; |
| /// use gbdt::gradient_boost::GBDT; |
| /// use gbdt::decision_tree::{Data, DataVec, PredVec, ValueType}; |
| /// |
| /// // set config for algorithm |
| /// let mut cfg = Config::new(); |
| /// cfg.set_feature_size(3); |
| /// cfg.set_max_depth(2); |
| /// cfg.set_min_leaf_size(1); |
| /// cfg.set_loss("SquaredError"); |
| /// cfg.set_iterations(2); |
| /// |
| /// // initialize GBDT algorithm |
| /// let mut gbdt = GBDT::new(&cfg); |
| /// |
| /// // setup training data |
| /// let data1 = Data::new_training_data ( |
| /// vec![1.0, 2.0, 3.0], |
| /// 1.0, |
| /// 1.0, |
| /// None |
| /// ); |
| /// let data2 = Data::new_training_data ( |
| /// vec![1.1, 2.1, 3.1], |
| /// 1.0, |
| /// 1.0, |
| /// None |
| /// ); |
| /// let data3 = Data::new_training_data ( |
| /// vec![2.0, 2.0, 1.0], |
| /// 1.0, |
| /// 2.0, |
| /// None |
| /// ); |
| /// let data4 = Data::new_training_data ( |
| /// vec![2.0, 2.3, 1.2], |
| /// 1.0, |
| /// 0.0, |
| /// None |
| /// ); |
| /// |
| /// let mut dv: DataVec = Vec::new(); |
| /// dv.push(data1.clone()); |
| /// dv.push(data2.clone()); |
| /// dv.push(data3.clone()); |
| /// dv.push(data4.clone()); |
| /// |
| /// // train the decision trees. |
| /// gbdt.fit(&mut dv); |
| /// |
| /// // print the tree. |
| /// gbdt.print_trees(); |
| /// ``` |
| </span><span class="kw">pub fn </span>print_trees(<span class="kw-2">&</span><span class="self">self</span>) { |
| <span class="kw">for </span>i <span class="kw">in </span><span class="number">0</span>..<span class="self">self</span>.trees.len() { |
| <span class="self">self</span>.trees[i].print(); |
| } |
| } |
| |
| <span class="doccomment">/// This is the process to calculate the residual as the target in next iteration |
| /// for squared error loss. |
| </span><span class="attribute">#[cfg(feature = <span class="string">"enable_training"</span>)] |
| </span><span class="kw">fn </span>square_loss_process(<span class="kw-2">&</span><span class="self">self</span>, dv: <span class="kw-2">&mut </span>DataVec, samples: usize, predicted: <span class="kw-2">&</span>PredVec) { |
| <span class="kw">for </span>i <span class="kw">in </span><span class="number">0</span>..samples { |
| dv[i].target = dv[i].label - predicted[i]; |
| } |
| <span class="kw">if </span><span class="self">self</span>.conf.debug { |
| <span class="macro">println!</span>(<span class="string">"RMSE = {}"</span>, RMSE(<span class="kw-2">&</span>dv, <span class="kw-2">&</span>predicted, samples)); |
| } |
| } |
| |
| <span class="doccomment">/// This is the process to calculate the residual as the target in next iteration |
| /// for negative binomial log-likehood loss. |
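    ///
    /// The new target is the gradient produced by `logit_loss_gradient` for the current
    /// prediction; in debug mode the AUC is computed on predictions normalized with
    /// `1.0 / (1.0 + exp(-2.0 * x))`, mirroring the normalization used in `predict`.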
| </span><span class="attribute">#[cfg(feature = <span class="string">"enable_training"</span>)] |
| </span><span class="kw">fn </span>log_loss_process(<span class="kw-2">&</span><span class="self">self</span>, dv: <span class="kw-2">&mut </span>DataVec, samples: usize, predicted: <span class="kw-2">&</span>PredVec) { |
| <span class="kw">for </span>i <span class="kw">in </span><span class="number">0</span>..samples { |
| dv[i].target = logit_loss_gradient(dv[i].label, predicted[i]); |
| } |
| <span class="kw">if </span><span class="self">self</span>.conf.debug { |
| <span class="kw">let </span>normalized_preds = predicted |
| .iter() |
| .map(|x| <span class="number">1.0 </span>/ (<span class="number">1.0 </span>+ ((-<span class="number">2.0 </span>* x).exp()))) |
| .collect(); |
| <span class="macro">println!</span>(<span class="string">"AUC = {}"</span>, AUC(<span class="kw-2">&</span>dv, <span class="kw-2">&</span>normalized_preds, dv.len())); |
| } |
| } |
| |
| <span class="doccomment">/// This is the process to calculate the residual as the target in next iteration |
| /// for LAD loss. |
| </span><span class="attribute">#[cfg(feature = <span class="string">"enable_training"</span>)] |
| </span><span class="kw">fn </span>lad_loss_process(<span class="kw-2">&</span><span class="self">self</span>, dv: <span class="kw-2">&mut </span>DataVec, samples: usize, predicted: <span class="kw-2">&</span>PredVec) { |
| <span class="kw">for </span>i <span class="kw">in </span><span class="number">0</span>..samples { |
| dv[i].residual = dv[i].label - predicted[i]; |
| dv[i].target = <span class="kw">if </span>dv[i].residual >= <span class="number">0.0 </span>{ <span class="number">1.0 </span>} <span class="kw">else </span>{ -<span class="number">1.0 </span>}; |
| } |
| <span class="kw">if </span><span class="self">self</span>.conf.debug { |
| <span class="macro">println!</span>(<span class="string">"MAE {}"</span>, MAE(<span class="kw-2">&</span>dv, <span class="kw-2">&</span>predicted, samples)); |
| } |
| } |
| |
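    // Illustrative sketch added for this write-up, not part of the original crate:
    // for LAD the target above is the sign of the residual, which is the negative
    // gradient of |label - predicted| with respect to the prediction. A hypothetical
    // standalone helper that mirrors the same rule:
    #[cfg(feature = "enable_training")]
    #[allow(dead_code)]
    fn lad_target_sketch(label: ValueType, pred: ValueType) -> ValueType {
        if label - pred >= 0.0 { 1.0 } else { -1.0 }
    }
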
| <span class="doccomment">/// Save the model to a file using serde. |
| /// |
| /// # Example |
| /// ```rust |
| /// use gbdt::config::Config; |
| /// use gbdt::gradient_boost::GBDT; |
| /// use gbdt::decision_tree::{Data, DataVec, PredVec, ValueType}; |
| /// |
| /// // set config for algorithm |
| /// let mut cfg = Config::new(); |
| /// cfg.set_feature_size(3); |
| /// cfg.set_max_depth(2); |
| /// cfg.set_min_leaf_size(1); |
| /// cfg.set_loss("SquaredError"); |
| /// cfg.set_iterations(2); |
| /// |
| /// // initialize GBDT algorithm |
| /// let mut gbdt = GBDT::new(&cfg); |
| /// |
| /// // setup training data |
| /// let data1 = Data::new_training_data ( |
| /// vec![1.0, 2.0, 3.0], |
| /// 1.0, |
| /// 1.0, |
| /// None |
| /// ); |
| /// let data2 = Data::new_training_data ( |
| /// vec![1.1, 2.1, 3.1], |
| /// 1.0, |
| /// 1.0, |
| /// None |
| /// ); |
| /// let data3 = Data::new_training_data ( |
| /// vec![2.0, 2.0, 1.0], |
| /// 1.0, |
| /// 2.0, |
| /// None |
| /// ); |
| /// let data4 = Data::new_training_data ( |
| /// vec![2.0, 2.3, 1.2], |
| /// 1.0, |
| /// 0.0, |
| /// None |
| /// ); |
| /// |
| /// let mut dv: DataVec = Vec::new(); |
| /// dv.push(data1.clone()); |
| /// dv.push(data2.clone()); |
| /// dv.push(data3.clone()); |
| /// dv.push(data4.clone()); |
| /// |
| /// // train the decision trees. |
| /// gbdt.fit(&mut dv); |
| /// |
| /// // Save model. |
| /// // gbdt.save_model("gbdt.model"); |
| /// ``` |
| </span><span class="kw">pub fn </span>save_model(<span class="kw-2">&</span><span class="self">self</span>, filename: <span class="kw-2">&</span>str) -> <span class="prelude-ty">Result</span><()> { |
| <span class="kw">let </span><span class="kw-2">mut </span>file = File::create(filename)<span class="question-mark">?</span>; |
| <span class="kw">let </span>serialized = serde_json::to_string(<span class="self">self</span>)<span class="question-mark">?</span>; |
| file.write_all(serialized.as_bytes())<span class="question-mark">?</span>; |
| |
| <span class="prelude-val">Ok</span>(()) |
| } |
| |
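    // Illustrative note added for this write-up, not part of the original crate:
    // save_model writes the whole GBDT as a single serde_json document, so the
    // saved file can be inspected with any JSON tool. The helper below is a
    // hypothetical sketch that just re-parses the saved file into a generic
    // serde_json::Value.
    #[allow(dead_code)]
    fn inspect_saved_model_sketch(filename: &str) -> Result<serde_json::Value> {
        let contents = std::fs::read_to_string(filename)?;
        Ok(serde_json::from_str(&contents)?)
    }
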
| <span class="doccomment">/// Load the model from the file. |
| /// |
| /// # Example |
| /// |
| /// ```rust |
| /// use gbdt::gradient_boost::GBDT; |
| /// //let gbdt = GBDT::load_model("./gbdt-rs.model").unwrap(); |
| /// ``` |
| /// |
| /// # Error |
| /// Error when get exception during model file parsing or deserialize. |
| </span><span class="kw">pub fn </span>load_model(filename: <span class="kw-2">&</span>str) -> <span class="prelude-ty">Result</span><<span class="self">Self</span>> { |
| <span class="kw">let </span><span class="kw-2">mut </span>file = File::open(filename)<span class="question-mark">?</span>; |
| <span class="kw">let </span><span class="kw-2">mut </span>contents = String::new(); |
| file.read_to_string(<span class="kw-2">&mut </span>contents)<span class="question-mark">?</span>; |
| <span class="kw">let </span>ret: <span class="self">Self </span>= serde_json::from_str(<span class="kw-2">&</span>contents)<span class="question-mark">?</span>; |
| <span class="prelude-val">Ok</span>(ret) |
| } |
| |
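    // Illustrative sketch added for this write-up, not part of the original crate:
    // save_model and load_model both go through serde_json, so a trained model
    // survives a save/load round trip. The helper below is a hypothetical
    // convenience wrapper, not an API of gbdt-rs.
    #[allow(dead_code)]
    fn save_and_reload_sketch(&self, filename: &str) -> Result<Self> {
        self.save_model(filename)?;
        Self::load_model(filename)
    }
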
| <span class="doccomment">/// Load the model from xgboost's model using a path. The xgboost's model should be converted by "convert_xgboost.py" |
| /// |
| /// # Example |
| /// |
| /// ```rust |
| /// use gbdt::gradient_boost::GBDT; |
| /// let gbdt = |
| /// GBDT::from_xgoost_dump("xgb-data/xgb_binary_logistic/gbdt.model", "binary:logistic").unwrap(); |
| /// ``` |
| /// |
| /// # Error |
| /// Error when get exception during model file parsing. |
| </span><span class="kw">pub fn </span>from_xgoost_dump(model_file: <span class="kw-2">&</span>str, objective: <span class="kw-2">&</span>str) -> <span class="prelude-ty">Result</span><<span class="self">Self</span>> { |
| <span class="kw">let </span>tree_file = File::open(<span class="kw-2">&</span>model_file)<span class="question-mark">?</span>; |
| <span class="kw">let </span>reader = BufReader::new(tree_file); |
| <span class="self">Self</span>::from_xgoost_reader(reader, objective) |
| } |
| |
| <span class="doccomment">/// Load the model from xgboost's model using a reader. The xgboost's model should be converted by "convert_xgboost.py" |
| /// |
| /// # Example |
| /// |
| /// ```rust |
| /// use gbdt::gradient_boost::GBDT; |
| /// let gbdt = |
| /// GBDT::from_xgoost_reader(std::io::Cursor::new(include_str!("xgb-data/xgb_binary_logistic/gbdt.model")), "binary:logistic").unwrap(); |
| /// ``` |
| /// |
| /// # Error |
| /// Error when get exception during model parsing. |
| </span><span class="kw">pub fn </span>from_xgoost_reader<R>(reader: R, objective: <span class="kw-2">&</span>str) -> <span class="prelude-ty">Result</span><<span class="self">Self</span>> |
| <span class="kw">where |
| </span>R: std::io::BufRead, |
| { |
| <span class="kw">let </span><span class="kw-2">mut </span>all_lines: Vec<String> = Vec::new(); |
| <span class="kw">let </span><span class="kw-2">mut </span>has_read_score = <span class="bool-val">false</span>; |
| <span class="kw">let </span><span class="kw-2">mut </span>base_score: ValueType = <span class="number">0.0</span>; |
| <span class="kw">for </span>line <span class="kw">in </span>reader.lines() { |
| <span class="comment">// read base score |
| </span><span class="kw">if </span>!has_read_score { |
| has_read_score = <span class="bool-val">true</span>; |
| base_score = line<span class="question-mark">?</span>.parse::<ValueType>()<span class="question-mark">?</span>; |
| <span class="kw">continue</span>; |
| } |
| <span class="comment">// read trees |
| </span><span class="kw">let </span>value: String = line<span class="question-mark">?</span>; |
| all_lines.push(value); |
| } |
| <span class="kw">let </span>single_line = all_lines.join(<span class="string">""</span>); |
| <span class="kw">let </span>json_obj: serde_json::Value = serde_json::from_str(<span class="kw-2">&</span>single_line)<span class="question-mark">?</span>; |
| |
| <span class="kw">let </span>nodes = json_obj.as_array().ok_or_else(|| <span class="string">"parse trees error"</span>)<span class="question-mark">?</span>; |
| |
| <span class="kw">let </span><span class="kw-2">mut </span>cfg = Config::new(); |
| cfg.set_loss(objective); |
| cfg.set_iterations(nodes.len()); |
| cfg.shrinkage = <span class="number">1.0</span>; |
| <span class="kw">let </span><span class="kw-2">mut </span>gbdt = GBDT::new(<span class="kw-2">&</span>cfg); |
| gbdt.bias = base_score; |
| |
| <span class="comment">// load trees |
| </span><span class="kw">for </span>node <span class="kw">in </span>nodes.iter() { |
| <span class="kw">let </span>tree = DecisionTree::get_from_xgboost(node)<span class="question-mark">?</span>; |
| gbdt.trees.push(tree); |
| } |
| <span class="prelude-val">Ok</span>(gbdt) |
| } |
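    // Illustrative sketch added for this write-up, not part of the original crate:
    // as the parsing above shows, from_xgoost_reader expects the converted dump to
    // start with a single line holding the base score, followed by a JSON array
    // describing the trees. The input below is a hypothetical minimal dump (an
    // empty tree array), shown only to clarify that shape; whether an empty tree
    // array is accepted end to end is not verified here.
    #[allow(dead_code)]
    fn xgboost_dump_shape_sketch() -> Result<Self> {
        let dump = "0.5\n[]";
        Self::from_xgoost_reader(std::io::Cursor::new(dump), "binary:logistic")
    }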
}