//! Gradient Descent
//!
//! Implementation of gradient descent algorithms. The module contains
//! the struct `GradientDesc`, which is instantiated within models
//! implementing the `Optimizable` trait.
//!
//! Batch gradient descent, stochastic gradient descent with Nesterov
//! momentum (`StochasticGD`), `AdaGrad` and `RMSProp` are implemented,
//! and there is flexibility to introduce new algorithms and fit them
//! into the same scheme easily.
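//!
//! # Examples
//!
//! A minimal sketch of driving an optimizer by hand. The `model`,
//! `initial_params`, `inputs` and `targets` bindings here are
//! hypothetical: any type implementing `Optimizable` (with its
//! associated `Inputs` and `Targets`) can stand in for them.
//!
//! ```ignore
//! use rusty_machine::learning::optim::OptimAlgorithm;
//! use rusty_machine::learning::optim::grad_desc::GradientDesc;
//!
//! // Step size 0.3, at most 100 iterations.
//! let gd = GradientDesc::new(0.3, 100);
//!
//! // `optimize` returns the parameters found after iterating from
//! // `initial_params` over the given data.
//! let optimal_params = gd.optimize(&model, &initial_params, &inputs, &targets);
//! ```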
| |
| </span><span class="kw">use </span>learning::optim::{Optimizable, OptimAlgorithm}; |
| <span class="kw">use </span>linalg::Vector; |
| <span class="kw">use </span>linalg::{Matrix, BaseMatrix}; |
| <span class="kw">use </span>rulinalg::utils; |
| |
| <span class="kw">use </span>learning::toolkit::rand_utils; |
| |
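/// Convergence tolerance used for early stopping: iteration halts once
/// the absolute change in cost between passes falls below this value.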
| <span class="kw">const </span>LEARNING_EPS: f64 = <span class="number">1e-20</span>; |
| |
| <span class="doccomment">/// Batch Gradient Descent algorithm |
| </span><span class="attribute">#[derive(Clone, Copy, Debug)] |
| </span><span class="kw">pub struct </span>GradientDesc { |
| <span class="doccomment">/// The step-size for the gradient descent steps. |
| </span>alpha: f64, |
| <span class="doccomment">/// The number of iterations to run. |
| </span>iters: usize, |
| } |
| |
| <span class="doccomment">/// The default gradient descent algorithm. |
| /// |
| /// The defaults are: |
| /// |
| /// - alpha = 0.3 |
| /// - iters = 100 |
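///
/// # Examples
///
/// ```
/// use rusty_machine::learning::optim::grad_desc::GradientDesc;
///
/// // Constructs the default algorithm: alpha = 0.3, iters = 100.
/// let gd = GradientDesc::default();
/// ```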
| </span><span class="kw">impl </span>Default <span class="kw">for </span>GradientDesc { |
| <span class="kw">fn </span>default() -> GradientDesc { |
| GradientDesc { |
| alpha: <span class="number">0.3</span>, |
| iters: <span class="number">100</span>, |
| } |
| } |
| } |
| |
| <span class="kw">impl </span>GradientDesc { |
| <span class="doccomment">/// Construct a gradient descent algorithm. |
| /// |
| /// Requires the step size and iteration count |
| /// to be specified. |
| /// |
| /// # Examples |
| /// |
| /// ``` |
| /// use rusty_machine::learning::optim::grad_desc::GradientDesc; |
| /// |
| /// let gd = GradientDesc::new(0.3, 10000); |
| /// ``` |
| </span><span class="kw">pub fn </span>new(alpha: f64, iters: usize) -> GradientDesc { |
| <span class="macro">assert!</span>(alpha > <span class="number">0f64</span>, |
| <span class="string">"The step size (alpha) must be greater than 0."</span>); |
| |
| GradientDesc { |
| alpha: alpha, |
| iters: iters, |
| } |
| } |
| } |
| |
| <span class="kw">impl</span><M: Optimizable> OptimAlgorithm<M> <span class="kw">for </span>GradientDesc { |
| <span class="kw">fn </span>optimize(<span class="kw-2">&</span><span class="self">self</span>, |
| model: <span class="kw-2">&</span>M, |
| start: <span class="kw-2">&</span>[f64], |
| inputs: <span class="kw-2">&</span>M::Inputs, |
| targets: <span class="kw-2">&</span>M::Targets) |
| -> Vec<f64> { |
| |
| <span class="comment">// Create the initial optimal parameters |
| </span><span class="kw">let </span><span class="kw-2">mut </span>optimizing_val = Vector::new(start.to_vec()); |
| <span class="comment">// The cost at the start of each iteration |
| </span><span class="kw">let </span><span class="kw-2">mut </span>start_iter_cost = <span class="number">0f64</span>; |
| |
| <span class="kw">for _ in </span><span class="number">0</span>..<span class="self">self</span>.iters { |
| <span class="comment">// Compute the cost and gradient for the current parameters |
| </span><span class="kw">let </span>(cost, grad) = model.compute_grad(optimizing_val.data(), inputs, targets); |
| |
| <span class="comment">// Early stopping |
| </span><span class="kw">if </span>(start_iter_cost - cost).abs() < LEARNING_EPS { |
| <span class="kw">break</span>; |
| } <span class="kw">else </span>{ |
| <span class="comment">// Update the optimal parameters using gradient descent |
| </span>optimizing_val = <span class="kw-2">&</span>optimizing_val - Vector::new(grad) * <span class="self">self</span>.alpha; |
| <span class="comment">// Update the latest cost |
| </span>start_iter_cost = cost; |
| } |
| } |
| optimizing_val.into_vec() |
| } |
| } |
| |
| <span class="doccomment">/// Stochastic Gradient Descent algorithm. |
| /// |
| /// Uses basic momentum to control the learning rate. |
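///
/// For each randomly-ordered training example with gradient `g`, the
/// velocity and parameters are updated as
///
/// - `dw <- mu * g + alpha * dw`
/// - `w <- w - ((1 + alpha) * dw - alpha * dw_prev)`
///
/// where `dw_prev` is the velocity from the previous step.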
| </span><span class="attribute">#[derive(Clone, Copy, Debug)] |
| </span><span class="kw">pub struct </span>StochasticGD { |
| <span class="doccomment">/// Controls the momentum of the descent |
| </span>alpha: f64, |
| <span class="doccomment">/// The square root of the raw learning rate. |
| </span>mu: f64, |
| <span class="doccomment">/// The number of passes through the data. |
| </span>iters: usize, |
| } |
| |
| <span class="doccomment">/// The default Stochastic GD algorithm. |
| /// |
| /// The defaults are: |
| /// |
| /// - alpha = 0.1 |
| /// - mu = 0.1 |
| /// - iters = 20 |
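///
/// # Examples
///
/// ```
/// use rusty_machine::learning::optim::grad_desc::StochasticGD;
///
/// // Constructs the default algorithm: alpha = 0.1, mu = 0.1, iters = 20.
/// let sgd = StochasticGD::default();
/// ```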
| </span><span class="kw">impl </span>Default <span class="kw">for </span>StochasticGD { |
| <span class="kw">fn </span>default() -> StochasticGD { |
| StochasticGD { |
| alpha: <span class="number">0.1</span>, |
| mu: <span class="number">0.1</span>, |
| iters: <span class="number">20</span>, |
| } |
| } |
| } |
| |
| <span class="kw">impl </span>StochasticGD { |
| <span class="doccomment">/// Construct a stochastic gradient descent algorithm. |
| /// |
| /// Requires the learning rate, momentum rate and iteration count |
| /// to be specified. |
| /// |
| /// With Nesterov momentum by default. |
| /// |
| /// # Examples |
| /// |
| /// ``` |
| /// use rusty_machine::learning::optim::grad_desc::StochasticGD; |
| /// |
| /// let sgd = StochasticGD::new(0.1, 0.3, 5); |
| /// ``` |
| </span><span class="kw">pub fn </span>new(alpha: f64, mu: f64, iters: usize) -> StochasticGD { |
| <span class="macro">assert!</span>(alpha > <span class="number">0f64</span>, <span class="string">"The momentum (alpha) must be greater than 0."</span>); |
| <span class="macro">assert!</span>(mu > <span class="number">0f64</span>, <span class="string">"The step size (mu) must be greater than 0."</span>); |
| |
| StochasticGD { |
| alpha: alpha, |
| mu: mu, |
| iters: iters, |
| } |
| } |
| } |
| |
| <span class="kw">impl</span><M> OptimAlgorithm<M> <span class="kw">for </span>StochasticGD |
| <span class="kw">where </span>M: Optimizable<Inputs = Matrix<f64>, Targets = Matrix<f64>> |
| { |
| <span class="kw">fn </span>optimize(<span class="kw-2">&</span><span class="self">self</span>, |
| model: <span class="kw-2">&</span>M, |
| start: <span class="kw-2">&</span>[f64], |
| inputs: <span class="kw-2">&</span>M::Inputs, |
| targets: <span class="kw-2">&</span>M::Targets) |
| -> Vec<f64> { |
| |
| <span class="comment">// Create the initial optimal parameters |
| </span><span class="kw">let </span><span class="kw-2">mut </span>optimizing_val = Vector::new(start.to_vec()); |
| <span class="comment">// Create the momentum based gradient distance |
| </span><span class="kw">let </span><span class="kw-2">mut </span>delta_w = Vector::zeros(start.len()); |
| |
| <span class="comment">// Set up the indices for permutation |
| </span><span class="kw">let </span><span class="kw-2">mut </span>permutation = (<span class="number">0</span>..inputs.rows()).collect::<Vec<<span class="kw">_</span>>>(); |
| <span class="comment">// The cost at the start of each iteration |
| </span><span class="kw">let </span><span class="kw-2">mut </span>start_iter_cost = <span class="number">0f64</span>; |
| |
| <span class="kw">for _ in </span><span class="number">0</span>..<span class="self">self</span>.iters { |
| <span class="comment">// The cost at the end of each stochastic gd pass |
| </span><span class="kw">let </span><span class="kw-2">mut </span>end_cost = <span class="number">0f64</span>; |
| <span class="comment">// Permute the indices |
| </span>rand_utils::in_place_fisher_yates(<span class="kw-2">&mut </span>permutation); |
| <span class="kw">for </span>i <span class="kw">in </span><span class="kw-2">&</span>permutation { |
| <span class="comment">// Compute the cost and gradient for this data pair |
| </span><span class="kw">let </span>(cost, vec_data) = model.compute_grad(optimizing_val.data(), |
| <span class="kw-2">&</span>inputs.select_rows(<span class="kw-2">&</span>[<span class="kw-2">*</span>i]), |
| <span class="kw-2">&</span>targets.select_rows(<span class="kw-2">&</span>[<span class="kw-2">*</span>i])); |
| |
| <span class="comment">// Backup previous velocity |
| </span><span class="kw">let </span>prev_w = delta_w.clone(); |
| <span class="comment">// Compute the difference in gradient using Nesterov momentum |
| </span>delta_w = Vector::new(vec_data) * <span class="self">self</span>.mu + <span class="kw-2">&</span>delta_w * <span class="self">self</span>.alpha; |
| <span class="comment">// Update the parameters |
| </span>optimizing_val = <span class="kw-2">&</span>optimizing_val - |
| (<span class="kw-2">&</span>prev_w * (-<span class="self">self</span>.alpha) + <span class="kw-2">&</span>delta_w * (<span class="number">1. </span>+ <span class="self">self</span>.alpha)); |
| <span class="comment">// Set the end cost (this is only used after the last iteration) |
| </span>end_cost += cost; |
| } |
| |
| end_cost /= inputs.rows() <span class="kw">as </span>f64; |
| |
| <span class="comment">// Early stopping |
| </span><span class="kw">if </span>(start_iter_cost - end_cost).abs() < LEARNING_EPS { |
| <span class="kw">break</span>; |
| } <span class="kw">else </span>{ |
| <span class="comment">// Update the cost |
| </span>start_iter_cost = end_cost; |
| } |
| } |
| optimizing_val.into_vec() |
| } |
| } |
| |
| <span class="doccomment">/// Adaptive Gradient Descent |
| /// |
| /// The adaptive gradient descent algorithm (Duchi et al. 2010). |
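///
/// Accumulates the squares of all gradients seen so far and scales each
/// step by the inverse of their square root:
///
/// - `s <- s + g * g` (element-wise)
/// - `w <- w - alpha * g / (tau + sqrt(s))`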
| </span><span class="attribute">#[derive(Debug)] |
| </span><span class="kw">pub struct </span>AdaGrad { |
    alpha: f64,
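    /// The adaptive scaling constant, added to the root of the
    /// accumulated squared gradients in the update denominator.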
    tau: f64,
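    /// The number of passes through the data.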
    iters: usize,
| } |
| |
| <span class="kw">impl </span>AdaGrad { |
| <span class="doccomment">/// Constructs a new AdaGrad algorithm. |
| /// |
| /// # Examples |
| /// |
| /// ``` |
| /// use rusty_machine::learning::optim::grad_desc::AdaGrad; |
| /// |
| /// // Create a new AdaGrad algorithm with step size 0.5 |
| /// // and adaptive scaling constant 1.0 |
| /// let gd = AdaGrad::new(0.5, 1.0, 100); |
| /// ``` |
| </span><span class="kw">pub fn </span>new(alpha: f64, tau: f64, iters: usize) -> AdaGrad { |
| <span class="macro">assert!</span>(alpha > <span class="number">0f64</span>, |
| <span class="string">"The step size (alpha) must be greater than 0."</span>); |
| <span class="macro">assert!</span>(tau >= <span class="number">0f64</span>, |
| <span class="string">"The adaptive constant (tau) cannot be negative."</span>); |
| AdaGrad { |
| alpha: alpha, |
| tau: tau, |
| iters: iters, |
| } |
| } |
| } |
| |
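/// The default AdaGrad algorithm.
///
/// The defaults are:
///
/// - alpha = 1.0
/// - tau = 3.0
/// - iters = 100
///
/// # Examples
///
/// ```
/// use rusty_machine::learning::optim::grad_desc::AdaGrad;
///
/// // Constructs the default algorithm: alpha = 1.0, tau = 3.0, iters = 100.
/// let gd = AdaGrad::default();
/// ```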
| <span class="kw">impl </span>Default <span class="kw">for </span>AdaGrad { |
| <span class="kw">fn </span>default() -> AdaGrad { |
| AdaGrad { |
| alpha: <span class="number">1f64</span>, |
| tau: <span class="number">3f64</span>, |
| iters: <span class="number">100</span>, |
| } |
| } |
| } |
| |
| <span class="kw">impl</span><M: Optimizable<Inputs = Matrix<f64>, Targets = Matrix<f64>>> OptimAlgorithm<M> <span class="kw">for </span>AdaGrad { |
| <span class="kw">fn </span>optimize(<span class="kw-2">&</span><span class="self">self</span>, |
| model: <span class="kw-2">&</span>M, |
| start: <span class="kw-2">&</span>[f64], |
| inputs: <span class="kw-2">&</span>M::Inputs, |
| targets: <span class="kw-2">&</span>M::Targets) |
| -> Vec<f64> { |
| |
| <span class="comment">// Initialize the adaptive scaling |
| </span><span class="kw">let </span><span class="kw-2">mut </span>ada_s = Vector::zeros(start.len()); |
| <span class="comment">// Initialize the optimal parameters |
| </span><span class="kw">let </span><span class="kw-2">mut </span>optimizing_val = Vector::new(start.to_vec()); |
| |
| <span class="comment">// Set up the indices for permutation |
| </span><span class="kw">let </span><span class="kw-2">mut </span>permutation = (<span class="number">0</span>..inputs.rows()).collect::<Vec<<span class="kw">_</span>>>(); |
| <span class="comment">// The cost at the start of each iteration |
| </span><span class="kw">let </span><span class="kw-2">mut </span>start_iter_cost = <span class="number">0f64</span>; |
| |
| <span class="kw">for _ in </span><span class="number">0</span>..<span class="self">self</span>.iters { |
| <span class="comment">// The cost at the end of each stochastic gd pass |
| </span><span class="kw">let </span><span class="kw-2">mut </span>end_cost = <span class="number">0f64</span>; |
| <span class="comment">// Permute the indices |
| </span>rand_utils::in_place_fisher_yates(<span class="kw-2">&mut </span>permutation); |
| <span class="kw">for </span>i <span class="kw">in </span><span class="kw-2">&</span>permutation { |
| <span class="comment">// Compute the cost and gradient for this data pair |
| </span><span class="kw">let </span>(cost, <span class="kw-2">mut </span>vec_data) = model.compute_grad(optimizing_val.data(), |
| <span class="kw-2">&</span>inputs.select_rows(<span class="kw-2">&</span>[<span class="kw-2">*</span>i]), |
| <span class="kw-2">&</span>targets.select_rows(<span class="kw-2">&</span>[<span class="kw-2">*</span>i])); |
| <span class="comment">// Update the adaptive scaling by adding the gradient squared |
| </span>utils::in_place_vec_bin_op(ada_s.mut_data(), <span class="kw-2">&</span>vec_data, |x, <span class="kw-2">&</span>y| <span class="kw-2">*</span>x += y * y); |
| |
| <span class="comment">// Compute the change in gradient |
| </span>utils::in_place_vec_bin_op(<span class="kw-2">&mut </span>vec_data, ada_s.data(), |x, <span class="kw-2">&</span>y| { |
| <span class="kw-2">*</span>x = <span class="self">self</span>.alpha * (<span class="kw-2">*</span>x / (<span class="self">self</span>.tau + (y).sqrt())) |
| }); |
| <span class="comment">// Update the parameters |
| </span>optimizing_val = <span class="kw-2">&</span>optimizing_val - Vector::new(vec_data); |
| <span class="comment">// Set the end cost (this is only used after the last iteration) |
| </span>end_cost += cost; |
| } |
| end_cost /= inputs.rows() <span class="kw">as </span>f64; |
| |
| <span class="comment">// Early stopping |
| </span><span class="kw">if </span>(start_iter_cost - end_cost).abs() < LEARNING_EPS { |
| <span class="kw">break</span>; |
| } <span class="kw">else </span>{ |
| <span class="comment">// Update the cost |
| </span>start_iter_cost = end_cost; |
| } |
| } |
| optimizing_val.into_vec() |
| } |
| } |
| |
| <span class="doccomment">/// RMSProp |
| /// |
| /// The RMSProp algorithm (Hinton et al. 2012). |
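///
/// Keeps a decaying running average of squared gradients and scales
/// each step by its inverse square root:
///
/// - `cache <- decay_rate * cache + (1 - decay_rate) * g * g`
/// - `w <- w - learning_rate * g / sqrt(cache + epsilon)`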
| </span><span class="attribute">#[derive(Debug, Clone, Copy)] |
| </span><span class="kw">pub struct </span>RMSProp { |
| <span class="doccomment">/// The base step size of gradient descent steps |
| </span>learning_rate: f64, |
| <span class="doccomment">/// Rate at which running total of average square gradients decays |
| </span>decay_rate: f64, |
| <span class="doccomment">/// Small value used to avoid divide by zero |
| </span>epsilon: f64, |
| <span class="doccomment">/// The number of passes through the data |
| </span>iters: usize, |
| } |
| |
| <span class="doccomment">/// The default RMSProp configuration |
| /// |
| /// The defaults are: |
| /// |
| /// - learning_rate = 0.01 |
| /// - decay_rate = 0.9 |
| /// - epsilon = 1.0e-5 |
| /// - iters = 50 |
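///
/// # Examples
///
/// ```
/// use rusty_machine::learning::optim::grad_desc::RMSProp;
///
/// // Constructs the default configuration: learning_rate = 0.01,
/// // decay_rate = 0.9, epsilon = 1.0e-5, iters = 50.
/// let rms = RMSProp::default();
/// ```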
| </span><span class="kw">impl </span>Default <span class="kw">for </span>RMSProp { |
| <span class="kw">fn </span>default() -> RMSProp { |
| RMSProp { |
| learning_rate: <span class="number">0.01</span>, |
| decay_rate: <span class="number">0.9</span>, |
| epsilon: <span class="number">1.0e-5</span>, |
| iters: <span class="number">50 |
| </span>} |
| } |
| } |
| |
| <span class="kw">impl </span>RMSProp { |
| <span class="doccomment">/// Construct an RMSProp algorithm. |
| /// |
| /// Requires learning rate, decay rate, epsilon, and iteration count. |
| /// |
| /// #Examples |
| /// |
| /// ``` |
| /// use rusty_machine::learning::optim::grad_desc::RMSProp; |
| /// |
| /// let rms = RMSProp::new(0.99, 0.01, 1e-5, 20); |
| /// ``` |
| </span><span class="kw">pub fn </span>new(learning_rate: f64, decay_rate: f64, epsilon: f64, iters: usize) -> RMSProp { |
| <span class="macro">assert!</span>(<span class="number">0f64 </span>< learning_rate, <span class="string">"The learning rate must be positive"</span>); |
| <span class="macro">assert!</span>(<span class="number">0f64 </span>< decay_rate && decay_rate < <span class="number">1f64</span>, <span class="string">"The decay rate must be between 0 and 1"</span>); |
| <span class="macro">assert!</span>(<span class="number">0f64 </span>< epsilon, <span class="string">"Epsilon must be positive"</span>); |
| |
| RMSProp { |
| decay_rate: decay_rate, |
| learning_rate: learning_rate, |
| epsilon: epsilon, |
| iters: iters |
| } |
| } |
| } |
| |
| <span class="kw">impl</span><M> OptimAlgorithm<M> <span class="kw">for </span>RMSProp |
| <span class="kw">where </span>M: Optimizable<Inputs = Matrix<f64>, Targets = Matrix<f64>> { |
| <span class="kw">fn </span>optimize(<span class="kw-2">&</span><span class="self">self</span>, |
| model: <span class="kw-2">&</span>M, |
| start: <span class="kw-2">&</span>[f64], |
| inputs: <span class="kw-2">&</span>M::Inputs, |
| targets: <span class="kw-2">&</span>M::Targets) |
| -> Vec<f64> { |
| <span class="comment">// Initial parameters |
| </span><span class="kw">let </span><span class="kw-2">mut </span>params = Vector::new(start.to_vec()); |
| <span class="comment">// Running average of squared gradients |
| </span><span class="kw">let </span><span class="kw-2">mut </span>rmsprop_cache = Vector::zeros(start.len()); |
| |
| <span class="comment">// Set up indices for permutation |
| </span><span class="kw">let </span><span class="kw-2">mut </span>permutation = (<span class="number">0</span>..inputs.rows()).collect::<Vec<<span class="kw">_</span>>>(); |
| <span class="comment">// The cost from the previous iteration |
| </span><span class="kw">let </span><span class="kw-2">mut </span>prev_cost = <span class="number">0f64</span>; |
| |
| <span class="kw">for _ in </span><span class="number">0</span>..<span class="self">self</span>.iters { |
| <span class="comment">// The cost at end of each pass |
| </span><span class="kw">let </span><span class="kw-2">mut </span>end_cost = <span class="number">0f64</span>; |
| <span class="comment">// Permute the vertices |
| </span>rand_utils::in_place_fisher_yates(<span class="kw-2">&mut </span>permutation); |
| <span class="kw">for </span>i <span class="kw">in </span><span class="kw-2">&</span>permutation { |
| <span class="kw">let </span>(cost, grad) = model.compute_grad(params.data(), |
| <span class="kw-2">&</span>inputs.select_rows(<span class="kw-2">&</span>[<span class="kw-2">*</span>i]), |
| <span class="kw-2">&</span>targets.select_rows(<span class="kw-2">&</span>[<span class="kw-2">*</span>i])); |
| |
| <span class="kw">let </span><span class="kw-2">mut </span>grad = Vector::new(grad); |
| <span class="kw">let </span>grad_squared = grad.clone().apply(<span class="kw-2">&</span>|x| x<span class="kw-2">*</span>x); |
| <span class="comment">// Update cached average of squared gradients |
| </span>rmsprop_cache = <span class="kw-2">&</span>rmsprop_cache<span class="kw-2">*</span><span class="self">self</span>.decay_rate + <span class="kw-2">&</span>grad_squared<span class="kw-2">*</span>(<span class="number">1.0 </span>- <span class="self">self</span>.decay_rate); |
| <span class="comment">// RMSProp update rule |
| </span>utils::in_place_vec_bin_op(grad.mut_data(), rmsprop_cache.data(), |x, <span class="kw-2">&</span>y| { |
| <span class="kw-2">*</span>x = <span class="kw-2">*</span>x * <span class="self">self</span>.learning_rate / (y + <span class="self">self</span>.epsilon).sqrt(); |
| }); |
| params = <span class="kw-2">&</span>params - <span class="kw-2">&</span>grad; |
| |
| end_cost += cost; |
| } |
| end_cost /= inputs.rows() <span class="kw">as </span>f64; |
| |
| <span class="comment">// Early stopping |
| </span><span class="kw">if </span>(prev_cost - end_cost).abs() < LEARNING_EPS { |
| <span class="kw">break</span>; |
| } <span class="kw">else </span>{ |
| prev_cost = end_cost; |
| } |
| } |
| params.into_vec() |
| } |
| } |
| |
| <span class="attribute">#[cfg(test)] |
| </span><span class="kw">mod </span>tests { |
| |
| <span class="kw">use super</span>::{GradientDesc, StochasticGD, AdaGrad, RMSProp}; |
| |
| <span class="attribute">#[test] |
| #[should_panic] |
| </span><span class="kw">fn </span>gd_neg_stepsize() { |
| <span class="kw">let _ </span>= GradientDesc::new(-<span class="number">0.5</span>, <span class="number">0</span>); |
| } |
| |
| <span class="attribute">#[test] |
| #[should_panic] |
| </span><span class="kw">fn </span>stochastic_gd_neg_momentum() { |
| <span class="kw">let _ </span>= StochasticGD::new(-<span class="number">0.5</span>, <span class="number">1f64</span>, <span class="number">0</span>); |
| } |
| |
| <span class="attribute">#[test] |
| #[should_panic] |
| </span><span class="kw">fn </span>stochastic_gd_neg_stepsize() { |
| <span class="kw">let _ </span>= StochasticGD::new(<span class="number">0.5</span>, -<span class="number">1f64</span>, <span class="number">0</span>); |
| } |
| |
| <span class="attribute">#[test] |
| #[should_panic] |
| </span><span class="kw">fn </span>adagrad_neg_stepsize() { |
| <span class="kw">let _ </span>= AdaGrad::new(-<span class="number">0.5</span>, <span class="number">1f64</span>, <span class="number">0</span>); |
| } |
| |
| <span class="attribute">#[test] |
| #[should_panic] |
| </span><span class="kw">fn </span>adagrad_neg_adaptive_scale() { |
| <span class="kw">let _ </span>= AdaGrad::new(<span class="number">0.5</span>, -<span class="number">1f64</span>, <span class="number">0</span>); |
| } |
| |
| <span class="attribute">#[test] |
| #[should_panic] |
| </span><span class="kw">fn </span>rmsprop_neg_decay_rate() { |
| <span class="kw">let _ </span>= RMSProp::new(-<span class="number">0.5</span>, <span class="number">0.005</span>, <span class="number">1.0e-5</span>, <span class="number">0</span>); |
| } |
| |
| <span class="attribute">#[test] |
| #[should_panic] |
| </span><span class="kw">fn </span>rmsprop_neg_epsilon() { |
| <span class="kw">let _ </span>= RMSProp::new(<span class="number">0.5</span>, <span class="number">0.005</span>, -<span class="number">1.0e-5</span>, <span class="number">0</span>); |
| } |
| |
| <span class="attribute">#[test] |
| #[should_panic] |
| </span><span class="kw">fn </span>rmsprop_neg_learning_rate() { |
| <span class="kw">let _ </span>= RMSProp::new(<span class="number">0.5</span>, -<span class="number">0.005</span>, <span class="number">1.0e-5</span>, <span class="number">0</span>); |
| } |
| } |