</pre><pre class="rust"><code><span class="doccomment">//! Neural Network Layers
</span><span class="kw">use </span>linalg::{Matrix, MatrixSlice, BaseMatrix};
<span class="kw">use </span>learning::LearningResult;
<span class="kw">use </span>learning::error::{Error, ErrorKind};
<span class="kw">use </span>learning::toolkit::activ_fn::ActivationFunc;
<span class="kw">use </span>rand::thread_rng;
<span class="kw">use </span>rand::distributions::Distribution;
<span class="kw">use </span>rand_distr::Normal;
<span class="kw">use </span>std::fmt::Debug;
<span class="doccomment">/// Trait for neural net layers
</span><span class="kw">pub trait </span>NetLayer : Debug {
<span class="doccomment">/// The result of propogating data forward through this layer
</span><span class="kw">fn </span>forward(<span class="kw-2">&amp;</span><span class="self">self</span>, input: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, params: MatrixSlice&lt;f64&gt;) -&gt; LearningResult&lt;Matrix&lt;f64&gt;&gt;;
<span class="doccomment">/// The gradient of the output of this layer with respect to its input
</span><span class="kw">fn </span>back_input(<span class="kw-2">&amp;</span><span class="self">self</span>, out_grad: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, input: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, output: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, params: MatrixSlice&lt;f64&gt;) -&gt; Matrix&lt;f64&gt;;
<span class="doccomment">/// The gradient of the output of this layer with respect to its parameters
</span><span class="kw">fn </span>back_params(<span class="kw-2">&amp;</span><span class="self">self</span>, out_grad: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, input: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, output: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, params: MatrixSlice&lt;f64&gt;) -&gt; Matrix&lt;f64&gt;;
<span class="doccomment">/// The default value of the parameters of this layer before training
</span><span class="kw">fn </span>default_params(<span class="kw-2">&amp;</span><span class="self">self</span>) -&gt; Vec&lt;f64&gt;;
<span class="doccomment">/// The shape of the parameters used by this layer
</span><span class="kw">fn </span>param_shape(<span class="kw-2">&amp;</span><span class="self">self</span>) -&gt; (usize, usize);
<span class="doccomment">/// The number of parameters used by this layer
</span><span class="kw">fn </span>num_params(<span class="kw-2">&amp;</span><span class="self">self</span>) -&gt; usize {
<span class="kw">let </span>shape = <span class="self">self</span>.param_shape();
shape.<span class="number">0 </span>* shape.<span class="number">1
</span>}
}
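
// Hypothetical sketch: the smallest layer satisfying the `NetLayer` contract is one
// that passes its input straight through and owns no parameters. `Identity` below is
// purely illustrative (it is not a layer provided by this module), but it shows which
// methods an implementor must supply and how the provided `num_params` default
// (rows * cols of `param_shape`) falls out of `param_shape`.
#[cfg(test)]
mod net_layer_contract_sketch {
    use super::*;

    /// Hypothetical layer that returns its input unchanged and has no parameters.
    #[derive(Debug)]
    struct Identity;

    impl NetLayer for Identity {
        fn forward(&self, input: &Matrix<f64>, _: MatrixSlice<f64>) -> LearningResult<Matrix<f64>> {
            Ok(input.clone())
        }

        fn back_input(&self, out_grad: &Matrix<f64>, _: &Matrix<f64>, _: &Matrix<f64>, _: MatrixSlice<f64>) -> Matrix<f64> {
            out_grad.clone()
        }

        fn back_params(&self, _: &Matrix<f64>, _: &Matrix<f64>, _: &Matrix<f64>, _: MatrixSlice<f64>) -> Matrix<f64> {
            Matrix::new(0, 0, Vec::new())
        }

        fn default_params(&self) -> Vec<f64> {
            Vec::new()
        }

        fn param_shape(&self) -> (usize, usize) {
            (0, 0)
        }
    }

    #[test]
    fn default_num_params_is_rows_times_cols() {
        assert_eq!(Identity.num_params(), 0);
    }
}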
<span class="doccomment">/// Linear network layer
///
/// Represents a fully connected layer with optional bias term
///
/// The parameters are a matrix of weights of size I x N
/// where N is the dimensionality of the output and I the dimensionality of the input
</span><span class="attribute">#[derive(Debug, Clone, Copy)]
</span><span class="kw">pub struct </span>Linear {
<span class="doccomment">/// The number of dimensions of the input
</span>input_size: usize,
<span class="doccomment">/// The number of dimensions of the output
</span>output_size: usize,
<span class="doccomment">/// Whether or not to include a bias term
</span>has_bias: bool,
}
<span class="kw">impl </span>Linear {
<span class="doccomment">/// Construct a new Linear layer
</span><span class="kw">pub fn </span>new(input_size: usize, output_size: usize) -&gt; Linear {
Linear {
input_size: input_size + <span class="number">1</span>,
output_size: output_size,
has_bias: <span class="bool-val">true
</span>}
}
<span class="doccomment">/// Construct a Linear layer without a bias term
</span><span class="kw">pub fn </span>without_bias(input_size: usize, output_size: usize) -&gt; Linear {
Linear {
input_size: input_size,
output_size: output_size,
has_bias: <span class="bool-val">false
</span>}
}
}
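
// Hypothetical sketch: with a bias term the stored input dimension is padded by one,
// so `Linear::new(3, 4)` keeps `input_size == 4` and (via the `NetLayer` impl below)
// reports a 4 x 4 parameter matrix, i.e. 16 parameters, while
// `Linear::without_bias(3, 4)` reports 3 x 4, i.e. 12 parameters.
#[cfg(test)]
mod linear_shape_sketch {
    use super::*;

    #[test]
    fn param_shapes_follow_the_constructors() {
        let with_bias = Linear::new(3, 4);
        assert_eq!(with_bias.param_shape(), (4, 4));
        assert_eq!(with_bias.num_params(), 16);

        let no_bias = Linear::without_bias(3, 4);
        assert_eq!(no_bias.param_shape(), (3, 4));
        assert_eq!(no_bias.num_params(), 12);
    }
}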
<span class="kw">fn </span>remove_first_col(mat: Matrix&lt;f64&gt;) -&gt; Matrix&lt;f64&gt;
{
<span class="kw">let </span>rows = mat.rows();
<span class="kw">let </span>cols = mat.cols();
<span class="kw">let </span><span class="kw-2">mut </span>data = mat.into_vec();
<span class="kw">let </span>len = data.len();
<span class="kw">let </span><span class="kw-2">mut </span>del = <span class="number">0</span>;
{
<span class="kw">let </span>v = <span class="kw-2">&amp;mut *</span>data;
<span class="kw">for </span>i <span class="kw">in </span><span class="number">0</span>..len {
<span class="kw">if </span>i % cols == <span class="number">0 </span>{
del += <span class="number">1</span>;
} <span class="kw">else if </span>del &gt; <span class="number">0 </span>{
v[i - del] = v[i];
}
}
}
<span class="kw">if </span>del &gt; <span class="number">0 </span>{
data.truncate(len - del);
}
Matrix::new(rows, cols - <span class="number">1</span>, data)
}
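
// Hypothetical sketch: `remove_first_col` shifts the surviving entries left in the
// row-major buffer and truncates, so the 2 x 3 matrix [[1, 2, 3], [4, 5, 6]] becomes
// the 2 x 2 matrix [[2, 3], [5, 6]].
#[cfg(test)]
mod remove_first_col_sketch {
    use super::*;

    #[test]
    fn drops_the_leading_column() {
        let mat = Matrix::new(2, 3, vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0]);
        let trimmed = remove_first_col(mat);
        assert_eq!(trimmed.rows(), 2);
        assert_eq!(trimmed.cols(), 2);
        assert_eq!(trimmed.into_vec(), vec![2.0, 3.0, 5.0, 6.0]);
    }
}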
<span class="kw">impl </span>NetLayer <span class="kw">for </span>Linear {
<span class="doccomment">/// Computes a matrix product
///
/// input should have dimensions N x I
/// where N is the number of samples and I is the dimensionality of the input
</span><span class="kw">fn </span>forward(<span class="kw-2">&amp;</span><span class="self">self</span>, input: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, params: MatrixSlice&lt;f64&gt;) -&gt; LearningResult&lt;Matrix&lt;f64&gt;&gt; {
<span class="kw">if </span><span class="self">self</span>.has_bias {
<span class="kw">if </span>input.cols()+<span class="number">1 </span>!= params.rows() {
<span class="prelude-val">Err</span>(Error::new(ErrorKind::InvalidData, <span class="string">&quot;The input had the wrong number of columns&quot;</span>))
} <span class="kw">else </span>{
<span class="prelude-val">Ok</span>(<span class="kw-2">&amp;</span>Matrix::ones(input.rows(), <span class="number">1</span>).hcat(input) * <span class="kw-2">&amp;</span>params)
}
} <span class="kw">else </span>{
<span class="kw">if </span>input.cols() != params.rows() {
<span class="prelude-val">Err</span>(Error::new(ErrorKind::InvalidData, <span class="string">&quot;The input had the wrong number of columns&quot;</span>))
} <span class="kw">else </span>{
<span class="prelude-val">Ok</span>(input * <span class="kw-2">&amp;</span>params)
}
}
}
<span class="kw">fn </span>back_input(<span class="kw-2">&amp;</span><span class="self">self</span>, out_grad: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, <span class="kw">_</span>: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, <span class="kw">_</span>: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, params: MatrixSlice&lt;f64&gt;) -&gt; Matrix&lt;f64&gt; {
<span class="macro">debug_assert_eq!</span>(out_grad.cols(), params.cols());
<span class="kw">let </span>gradient = out_grad * <span class="kw-2">&amp;</span>params.transpose();
<span class="kw">if </span><span class="self">self</span>.has_bias {
remove_first_col(gradient)
} <span class="kw">else </span>{
gradient
}
}
<span class="kw">fn </span>back_params(<span class="kw-2">&amp;</span><span class="self">self</span>, out_grad: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, input: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, <span class="kw">_</span>: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, <span class="kw">_</span>: MatrixSlice&lt;f64&gt;) -&gt; Matrix&lt;f64&gt; {
<span class="macro">debug_assert_eq!</span>(input.rows(), out_grad.rows());
<span class="kw">if </span><span class="self">self</span>.has_bias {
<span class="kw-2">&amp;</span>Matrix::ones(input.rows(), <span class="number">1</span>).hcat(input).transpose() * out_grad
} <span class="kw">else </span>{
<span class="kw-2">&amp;</span>input.transpose() * out_grad
}
}
<span class="doccomment">/// Initializes weights using Xavier initialization
///
/// weights drawn from gaussian distribution with 0 mean and variance 2/(input_size+output_size)
</span><span class="kw">fn </span>default_params(<span class="kw-2">&amp;</span><span class="self">self</span>) -&gt; Vec&lt;f64&gt; {
<span class="kw">let </span><span class="kw-2">mut </span>distro = Normal::new(<span class="number">0.0</span>, (<span class="number">2.0</span>/(<span class="self">self</span>.input_size+<span class="self">self</span>.output_size) <span class="kw">as </span>f64).sqrt()).unwrap();
<span class="kw">let </span><span class="kw-2">mut </span>rng = thread_rng();
(<span class="number">0</span>..<span class="self">self</span>.input_size<span class="kw-2">*</span><span class="self">self</span>.output_size).map(|<span class="kw">_</span>| distro.sample(<span class="kw-2">&amp;mut </span>rng))
.collect()
}
<span class="kw">fn </span>param_shape(<span class="kw-2">&amp;</span><span class="self">self</span>) -&gt; (usize, usize) {
(<span class="self">self</span>.input_size, <span class="self">self</span>.output_size)
}
}
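
// Hypothetical sketch: a forward pass through a bias-free Linear layer is just
// `input * params`. Building the parameter view assumes rulinalg's
// `MatrixSlice::from_matrix`; with a 1 x 2 input [1, 2] and 2 x 1 weights [3, 4]^T
// the output is [1*3 + 2*4] = [11].
#[cfg(test)]
mod linear_forward_sketch {
    use super::*;

    #[test]
    fn forward_is_a_matrix_product() {
        let layer = Linear::without_bias(2, 1);
        let input = Matrix::new(1, 2, vec![1.0, 2.0]);
        let weights = Matrix::new(2, 1, vec![3.0, 4.0]);
        let params = MatrixSlice::from_matrix(&weights, [0, 0], 2, 1);

        let output = layer.forward(&input, params).unwrap();
        assert_eq!(output.into_vec(), vec![11.0]);
    }
}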
<span class="kw">impl</span>&lt;T: ActivationFunc&gt; NetLayer <span class="kw">for </span>T {
<span class="doccomment">/// Applies the activation function to each element of the input
</span><span class="kw">fn </span>forward(<span class="kw-2">&amp;</span><span class="self">self</span>, input: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, <span class="kw">_</span>: MatrixSlice&lt;f64&gt;) -&gt; LearningResult&lt;Matrix&lt;f64&gt;&gt; {
<span class="kw">let </span><span class="kw-2">mut </span>output = Vec::with_capacity(input.rows()<span class="kw-2">*</span>input.cols());
<span class="kw">for </span>val <span class="kw">in </span>input.data() {
output.push(T::func(<span class="kw-2">*</span>val));
}
<span class="prelude-val">Ok</span>(Matrix::new(input.rows(), input.cols(), output))
}
<span class="kw">fn </span>back_input(<span class="kw-2">&amp;</span><span class="self">self</span>, out_grad: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, <span class="kw">_</span>: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, output: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, <span class="kw">_</span>: MatrixSlice&lt;f64&gt;) -&gt; Matrix&lt;f64&gt; {
<span class="kw">let </span><span class="kw-2">mut </span>in_grad = Vec::with_capacity(output.rows()<span class="kw-2">*</span>output.cols());
<span class="kw">for </span>(y, g) <span class="kw">in </span>output.data().iter().zip(out_grad.data()) {
in_grad.push(T::func_grad_from_output(<span class="kw-2">*</span>y) * g);
}
Matrix::new(output.rows(), output.cols(), in_grad)
}
<span class="kw">fn </span>back_params(<span class="kw-2">&amp;</span><span class="self">self</span>, <span class="kw">_</span>: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, <span class="kw">_</span>: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, <span class="kw">_</span>: <span class="kw-2">&amp;</span>Matrix&lt;f64&gt;, <span class="kw">_</span>: MatrixSlice&lt;f64&gt;) -&gt; Matrix&lt;f64&gt; {
Matrix::new(<span class="number">0</span>, <span class="number">0</span>, Vec::new())
}
<span class="kw">fn </span>default_params(<span class="kw-2">&amp;</span><span class="self">self</span>) -&gt; Vec&lt;f64&gt; {
Vec::new()
}
<span class="kw">fn </span>param_shape(<span class="kw-2">&amp;</span><span class="self">self</span>) -&gt; (usize, usize) {
(<span class="number">0</span>, <span class="number">0</span>)
}
}
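
// Hypothetical sketch: through the blanket impl above, any `ActivationFunc` acts as a
// parameter-free layer whose `forward` simply maps the function over the input. This
// assumes the crate's `Sigmoid` activation from `learning::toolkit::activ_fn` and
// rulinalg's `MatrixSlice::from_matrix`; sigmoid(0) == 0.5.
#[cfg(test)]
mod activation_layer_sketch {
    use super::*;
    use learning::toolkit::activ_fn::Sigmoid;

    #[test]
    fn activations_are_parameter_free_layers() {
        assert_eq!(Sigmoid.param_shape(), (0, 0));
        assert_eq!(Sigmoid.num_params(), 0);

        let input = Matrix::new(1, 1, vec![0.0]);
        let empty = Matrix::new(0, 0, Vec::<f64>::new());
        let params = MatrixSlice::from_matrix(&empty, [0, 0], 0, 0);
        let output = Sigmoid.forward(&input, params).unwrap();
        assert_eq!(output.into_vec(), vec![0.5]);
    }
}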