<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8"/>
<meta content="IE=edge" http-equiv="X-UA-Compatible"/>
<meta content="width=device-width, initial-scale=1" name="viewport"/>
<title>Autograd Package — mxnet documentation</title>
<link crossorigin="anonymous" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css" integrity="sha384-1q8mTJOASx8j1Au+a5WDVnPi2lkFfwwEAa8hDDdjZlpLegxhjVME1fgjWPGmkzs7" rel="stylesheet"/>
<link href="https://maxcdn.bootstrapcdn.com/font-awesome/4.5.0/css/font-awesome.min.css" rel="stylesheet"/>
<link href="../../../_static/basic.css" rel="stylesheet" type="text/css"/>
<link href="../../../_static/pygments.css" rel="stylesheet" type="text/css"/>
<link href="../../../_static/mxnet.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT: '../../../',
VERSION: '',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '.html',
HAS_SOURCE: true,
SOURCELINK_SUFFIX: ''
};
</script>
<script src="../../../_static/jquery-1.11.1.js" type="text/javascript"></script>
<script src="../../../_static/underscore.js" type="text/javascript"></script>
<script src="../../../_static/searchtools_custom.js" type="text/javascript"></script>
<script src="../../../_static/doctools.js" type="text/javascript"></script>
<script src="../../../_static/selectlang.js" type="text/javascript"></script>
<script src="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML" type="text/javascript"></script>
<script type="text/javascript"> jQuery(function() { Search.loadIndex("/searchindex.js"); Search.init();}); </script>
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new
Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-96378503-1', 'auto');
ga('send', 'pageview');
</script>
<!-- -->
<!-- <script type="text/javascript" src="../../../_static/jquery.js"></script> -->
<!-- -->
<!-- <script type="text/javascript" src="../../../_static/underscore.js"></script> -->
<!-- -->
<!-- <script type="text/javascript" src="../../../_static/doctools.js"></script> -->
<!-- -->
<!-- <script type="text/javascript" src="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script> -->
<!-- -->
<link href="../index.html" rel="up" title="MXNet - Python API">
<link href="../gluon/gluon.html" rel="next" title="Gluon Package"/>
<link href="../executor/executor.html" rel="prev" title="Executor and Executor Manager"/>
<link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png">
</link></link></head>
<body background="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-background-compressed.jpeg" role="document">
<div class="content-block"><div class="navbar navbar-fixed-top">
<div class="container" id="navContainer">
<div class="innder" id="header-inner">
<h1 id="logo-wrap">
<a href="../../../" id="logo"><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet_logo.png"/></a>
</h1>
<nav class="nav-bar" id="main-nav">
<a class="main-nav-link" href="../../../install/index.html">Install</a>
<a class="main-nav-link" href="../../../tutorials/index.html">Tutorials</a>
<span id="dropdown-menu-position-anchor">
<a aria-expanded="true" aria-haspopup="true" class="main-nav-link dropdown-toggle" data-toggle="dropdown" href="#" role="button">Gluon <span class="caret"></span></a>
<ul class="dropdown-menu navbar-menu" id="package-dropdown-menu">
<li><a class="main-nav-link" href="../../../gluon/index.html">About</a></li>
<li><a class="main-nav-link" href="http://gluon.mxnet.io">Tutorials</a></li>
</ul>
</span>
<span id="dropdown-menu-position-anchor">
<a aria-expanded="true" aria-haspopup="true" class="main-nav-link dropdown-toggle" data-toggle="dropdown" href="#" role="button">API <span class="caret"></span></a>
<ul class="dropdown-menu navbar-menu" id="package-dropdown-menu">
<li><a class="main-nav-link" href="../../../api/python/index.html">Python</a></li>
<li><a class="main-nav-link" href="../../../api/scala/index.html">Scala</a></li>
<li><a class="main-nav-link" href="../../../api/r/index.html">R</a></li>
<li><a class="main-nav-link" href="../../../api/julia/index.html">Julia</a></li>
<li><a class="main-nav-link" href="../../../api/c++/index.html">C++</a></li>
<li><a class="main-nav-link" href="../../../api/perl/index.html">Perl</a></li>
</ul>
</span>
<span id="dropdown-menu-position-anchor-docs">
<a aria-expanded="true" aria-haspopup="true" class="main-nav-link dropdown-toggle" data-toggle="dropdown" href="#" role="button">Docs <span class="caret"></span></a>
<ul class="dropdown-menu navbar-menu" id="package-dropdown-menu-docs">
<li><a class="main-nav-link" href="../../../faq/index.html">FAQ</a></li>
<li><a class="main-nav-link" href="../../../architecture/index.html">Architecture</a></li>
<li><a class="main-nav-link" href="https://github.com/apache/incubator-mxnet/tree/0.12.0/example">Examples</a></li>
<li><a class="main-nav-link" href="../../../model_zoo/index.html">Model Zoo</a></li>
</ul>
</span>
<a class="main-nav-link" href="https://github.com/dmlc/mxnet">Github</a>
<span id="dropdown-menu-position-anchor-community">
<a aria-expanded="true" aria-haspopup="true" class="main-nav-link dropdown-toggle" data-toggle="dropdown" href="#" role="button">Community <span class="caret"></span></a>
<ul class="dropdown-menu navbar-menu" id="package-dropdown-menu-community">
<li><a class="main-nav-link" href="../../../community/index.html">Community</a></li>
<li><a class="main-nav-link" href="../../../community/contribute.html">Contribute</a></li>
<li><a class="main-nav-link" href="../../../community/powered_by.html">Powered By</a></li>
</ul>
</span>
<a class="main-nav-link" href="http://discuss.mxnet.io">Discuss</a>
<span id="dropdown-menu-position-anchor-version" style="position: relative"><a href="#" class="main-nav-link dropdown-toggle" data-toggle="dropdown" role="button" aria-haspopup="true" aria-expanded="true">Versions(0.12.0)<span class="caret"></span></a><ul id="package-dropdown-menu" class="dropdown-menu"><li><a class="main-nav-link" href=https://mxnet.incubator.apache.org/>1.0.0</a></li><li><a class="main-nav-link" href=https://mxnet.incubator.apache.org/versions/0.12.1/index.html>0.12.1</a></li><li><a class="main-nav-link" href=https://mxnet.incubator.apache.org/versions/0.12.0/index.html>0.12.0</a></li><li><a class="main-nav-link" href=https://mxnet.incubator.apache.org/versions/0.11.0/index.html>0.11.0</a></li><li><a class="main-nav-link" href=https://mxnet.incubator.apache.org/versions/master/index.html>master</a></li></ul></span></nav>
<script> function getRootPath(){ return "../../../" } </script>
<div class="burgerIcon dropdown">
<a class="dropdown-toggle" data-toggle="dropdown" href="#" role="button"></a>
<ul class="dropdown-menu" id="burgerMenu">
<li><a href="../../../install/index.html">Install</a></li>
<li><a class="main-nav-link" href="../../../tutorials/index.html">Tutorials</a></li>
<li class="dropdown-submenu">
<a href="#" tabindex="-1">Community</a>
<ul class="dropdown-menu">
<li><a href="../../../community/index.html" tabindex="-1">Community</a></li>
<li><a href="../../../community/contribute.html" tabindex="-1">Contribute</a></li>
<li><a href="../../../community/powered_by.html" tabindex="-1">Powered By</a></li>
</ul>
</li>
<li class="dropdown-submenu">
<a href="#" tabindex="-1">API</a>
<ul class="dropdown-menu">
<li><a href="../../../api/python/index.html" tabindex="-1">Python</a>
</li>
<li><a href="../../../api/scala/index.html" tabindex="-1">Scala</a>
</li>
<li><a href="../../../api/r/index.html" tabindex="-1">R</a>
</li>
<li><a href="../../../api/julia/index.html" tabindex="-1">Julia</a>
</li>
<li><a href="../../../api/c++/index.html" tabindex="-1">C++</a>
</li>
<li><a href="../../../api/perl/index.html" tabindex="-1">Perl</a>
</li>
</ul>
</li>
<li class="dropdown-submenu">
<a href="#" tabindex="-1">Docs</a>
<ul class="dropdown-menu">
<li><a href="../../../tutorials/index.html" tabindex="-1">Tutorials</a></li>
<li><a href="../../../faq/index.html" tabindex="-1">FAQ</a></li>
<li><a href="../../../architecture/index.html" tabindex="-1">Architecture</a></li>
<li><a href="https://github.com/apache/incubator-mxnet/tree/0.12.0/example" tabindex="-1">Examples</a></li>
<li><a href="../../../model_zoo/index.html" tabindex="-1">Model Zoo</a></li>
</ul>
</li>
<li><a href="../../../architecture/index.html">Architecture</a></li>
<li><a class="main-nav-link" href="https://github.com/dmlc/mxnet">Github</a></li>
<li id="dropdown-menu-position-anchor-version-mobile" class="dropdown-submenu" style="position: relative"><a href="#" tabindex="-1">Versions(0.12.0)</a><ul class="dropdown-menu"><li><a tabindex="-1" href=https://mxnet.incubator.apache.org/>1.0.0</a></li><li><a tabindex="-1" href=https://mxnet.incubator.apache.org/versions/0.12.1/index.html>0.12.1</a></li><li><a tabindex="-1" href=https://mxnet.incubator.apache.org/versions/0.12.0/index.html>0.12.0</a></li><li><a tabindex="-1" href=https://mxnet.incubator.apache.org/versions/0.11.0/index.html>0.11.0</a></li><li><a tabindex="-1" href=https://mxnet.incubator.apache.org/versions/master/index.html>master</a></li></ul></li></ul>
</div>
<div class="plusIcon dropdown">
<a class="dropdown-toggle" data-toggle="dropdown" href="#" role="button"><span aria-hidden="true" class="glyphicon glyphicon-plus"></span></a>
<ul class="dropdown-menu dropdown-menu-right" id="plusMenu"></ul>
</div>
<div id="search-input-wrap">
<form action="../../../search.html" autocomplete="off" class="" method="get" role="search">
<div class="form-group inner-addon left-addon">
<i class="glyphicon glyphicon-search"></i>
<input class="form-control" name="q" placeholder="Search" type="text"/>
</div>
<input name="check_keywords" type="hidden" value="yes"/>
<input name="area" type="hidden" value="default"/>
</form>
<div id="search-preview"></div>
</div>
<div id="searchIcon">
<span aria-hidden="true" class="glyphicon glyphicon-search"></span>
</div>
<!-- <div id="lang-select-wrap"> -->
<!-- <label id="lang-select-label"> -->
<!-- <\!-- <i class="fa fa-globe"></i> -\-> -->
<!-- <span></span> -->
<!-- </label> -->
<!-- <select id="lang-select"> -->
<!-- <option value="en">Eng</option> -->
<!-- <option value="zh">中文</option> -->
<!-- </select> -->
<!-- </div> -->
<!-- <a id="mobile-nav-toggle">
<span class="mobile-nav-toggle-bar"></span>
<span class="mobile-nav-toggle-bar"></span>
<span class="mobile-nav-toggle-bar"></span>
</a> -->
</div>
</div>
</div>
<script type="text/javascript">
$('body').css('background', 'white');
</script>
<div class="container">
<div class="row">
<div aria-label="main navigation" class="sphinxsidebar leftsidebar" role="navigation">
<div class="sphinxsidebarwrapper">
<ul class="current">
<li class="toctree-l1 current"><a class="reference internal" href="../index.html">Python Documents</a><ul class="current">
<li class="toctree-l2"><a class="reference internal" href="../index.html#ndarray-api">NDArray API</a></li>
<li class="toctree-l2"><a class="reference internal" href="../index.html#symbol-api">Symbol API</a></li>
<li class="toctree-l2"><a class="reference internal" href="../index.html#module-api">Module API</a></li>
<li class="toctree-l2 current"><a class="reference internal" href="../index.html#autograd-api">Autograd API</a><ul class="current">
<li class="toctree-l3 current"><a class="current reference internal" href="">Autograd Package</a><ul>
<li class="toctree-l4"><a class="reference internal" href="#overview">Overview</a></li>
<li class="toctree-l4"><a class="reference internal" href="#train-mode-and-predict-mode">Train mode and Predict Mode</a></li>
<li class="toctree-l4"><a class="reference internal" href="#autograd">Autograd</a></li>
<li class="toctree-l4"><a class="reference internal" href="#api-reference">API Reference</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../index.html#gluon-api">Gluon API</a></li>
<li class="toctree-l2"><a class="reference internal" href="../index.html#kvstore-api">KVStore API</a></li>
<li class="toctree-l2"><a class="reference internal" href="../index.html#io-api">IO API</a></li>
<li class="toctree-l2"><a class="reference internal" href="../index.html#image-api">Image API</a></li>
<li class="toctree-l2"><a class="reference internal" href="../index.html#optimization-api">Optimization API</a></li>
<li class="toctree-l2"><a class="reference internal" href="../index.html#callback-api">Callback API</a></li>
<li class="toctree-l2"><a class="reference internal" href="../index.html#metric-api">Metric API</a></li>
<li class="toctree-l2"><a class="reference internal" href="../index.html#run-time-compilation-api">Run-Time Compilation API</a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../r/index.html">R Documents</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../julia/index.html">Julia Documents</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../c++/index.html">C++ Documents</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../scala/index.html">Scala Documents</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../perl/index.html">Perl Documents</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../faq/index.html">HowTo Documents</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../architecture/index.html">System Documents</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../tutorials/index.html">Tutorials</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../../community/index.html">Community</a></li>
</ul>
</div>
</div>
<div class="content">
<div class="page-tracker"></div>
<div class="section" id="autograd-package">
<span id="autograd-package"></span><h1>Autograd Package<a class="headerlink" href="#autograd-package" title="Permalink to this headline"></a></h1>
<div class="admonition warning">
<p class="first admonition-title">Warning</p>
<p class="last">This package is currently experimental and may change in the near future.</p>
</div>
<div class="section" id="overview">
<span id="overview"></span><h2>Overview<a class="headerlink" href="#overview" title="Permalink to this headline"></a></h2>
<p>The <code class="docutils literal"><span class="pre">autograd</span></code> package enables automatic
differentiation of NDArray operations.
In machine learning applications,
<code class="docutils literal"><span class="pre">autograd</span></code> is often used to calculate the gradients
of loss functions with respect to parameters.</p>
<div class="section" id="record-vs-pause">
<span id="record-vs-pause"></span><h3>Record vs Pause<a class="headerlink" href="#record-vs-pause" title="Permalink to this headline"></a></h3>
<p><code class="docutils literal"><span class="pre">autograd</span></code> records computation history on the fly to calculate gradients later.
This is only enabled inside a <code class="docutils literal"><span class="pre">with</span> <span class="pre">autograd.record():</span></code> block.
A <code class="docutils literal"><span class="pre">with</span> <span class="pre">auto_grad.pause()</span></code> block can be used inside a <code class="docutils literal"><span class="pre">record()</span></code> block
to temporarily disable recording.</p>
<p>To compute the gradient with respect to an <code class="docutils literal"><span class="pre">NDArray</span></code> <code class="docutils literal"><span class="pre">x</span></code>, first call <code class="docutils literal"><span class="pre">x.attach_grad()</span></code>
to allocate space for the gradient. Then, start a <code class="docutils literal"><span class="pre">with</span> <span class="pre">autograd.record()</span></code> block,
and do some computation. Finally, call <code class="docutils literal"><span class="pre">backward()</span></code> on the result:</p>
<div class="highlight-python"><div class="highlight"><pre><span></span><span class="gp">>>> </span><span class="n">x</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span><span class="mi">2</span><span class="p">,</span><span class="mi">3</span><span class="p">,</span><span class="mi">4</span><span class="p">])</span>
<span class="gp">>>> </span><span class="n">x</span><span class="o">.</span><span class="n">attach_grad</span><span class="p">()</span>
<span class="gp">>>> </span><span class="k">with</span> <span class="n">mx</span><span class="o">.</span><span class="n">autograd</span><span class="o">.</span><span class="n">record</span><span class="p">():</span>
<span class="gp">... </span> <span class="n">y</span> <span class="o">=</span> <span class="n">x</span> <span class="o">*</span> <span class="n">x</span> <span class="o">+</span> <span class="mi">1</span>
<span class="gp">>>> </span><span class="n">y</span><span class="o">.</span><span class="n">backward</span><span class="p">()</span>
<span class="gp">>>> </span><span class="k">print</span><span class="p">(</span><span class="n">x</span><span class="o">.</span><span class="n">grad</span><span class="p">)</span>
<span class="go">[ 2. 4. 6. 8.]</span>
<span class="go"><ndarray 4="" @cpu(0)=""></ndarray></span>
</pre></div>
</div>
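<p>As an additional illustration (a minimal sketch, not part of the original example), recording can be
temporarily suspended inside the <code class="docutils literal"><span class="pre">record()</span></code> block with
<code class="docutils literal"><span class="pre">autograd.pause()</span></code>, for instance to compute values that should not be differentiated:</p>
<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx

x = mx.nd.array([1, 2, 3, 4])
x.attach_grad()
with mx.autograd.record():
    y = x * x + 1
    with mx.autograd.pause():
        # not recorded: no gradient flows through this part
        y_mean = y.mean()
y.backward()
print(x.grad)   # [ 2.  4.  6.  8.]
</pre></div>
</div>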
</div>
</div>
<div class="section" id="train-mode-and-predict-mode">
<span id="train-mode-and-predict-mode"></span><h2>Train mode and Predict Mode<a class="headerlink" href="#train-mode-and-predict-mode" title="Permalink to this headline"></a></h2>
<p>Some operators (Dropout, BatchNorm, etc.) behave differently
when training and when making predictions.
This can be controlled with the <code class="docutils literal"><span class="pre">train_mode</span></code> and <code class="docutils literal"><span class="pre">predict_mode</span></code> scopes.</p>
<p>By default, MXNet is in <code class="docutils literal"><span class="pre">predict_mode</span></code>.
A <code class="docutils literal"><span class="pre">with</span> <span class="pre">autograd.record()</span></code> block by default turns on <code class="docutils literal"><span class="pre">train_mode</span></code>
(equivalent to <code class="docutils literal"><span class="pre">with</span> <span class="pre">autograd.record(train_mode=True)</span></code>).
To compute a gradient in prediction mode (as when generating adversarial examples),
call record with <code class="docutils literal"><span class="pre">train_mode=False</span></code> and then call <code class="docutils literal"><span class="pre">backward(train_mode=False)</span></code>.</p>
<p>Although training usually coincides with recording,
this isn’t always the case.
To control <em>train_mode</em> vs <em>predict_mode</em> without changing
<em>recording</em> vs <em>not recording</em>,
use a <code class="docutils literal"><span class="pre">with</span> <span class="pre">autograd.train_mode():</span></code>
or <code class="docutils literal"><span class="pre">with</span> <span class="pre">autograd.predict_mode():</span></code> block.</p>
<p>Detailed tutorials are available in Part 1 of
<a class="reference external" href="http://gluon.mxnet.io/">the MXNet gluon book</a>.</p>
<script src="../../_static/js/auto_module_index.js" type="text/javascript"></script></div>
<div class="section" id="autograd">
<span id="autograd"></span><h2>Autograd<a class="headerlink" href="#autograd" title="Permalink to this headline"></a></h2>
<table border="1" class="longtable docutils">
<colgroup>
<col width="10%"/>
<col width="90%"/>
</colgroup>
<tbody valign="top">
<tr class="row-odd"><td><a class="reference internal" href="#mxnet.autograd.record" title="mxnet.autograd.record"><code class="xref py py-obj docutils literal"><span class="pre">record</span></code></a></td>
<td>Returns an autograd recording scope context to be used in a ‘with’ statement and captures code that needs gradients to be calculated.</td>
</tr>
<tr class="row-even"><td><a class="reference internal" href="#mxnet.autograd.pause" title="mxnet.autograd.pause"><code class="xref py py-obj docutils literal"><span class="pre">pause</span></code></a></td>
<td>Returns a scope context to be used in a ‘with’ statement for code that does not need gradients to be calculated.</td>
</tr>
<tr class="row-odd"><td><a class="reference internal" href="#mxnet.autograd.train_mode" title="mxnet.autograd.train_mode"><code class="xref py py-obj docutils literal"><span class="pre">train_mode</span></code></a></td>
<td>Returns a scope context to be used in a ‘with’ statement in which forward pass behavior is set to training mode, without changing the recording states.</td>
</tr>
<tr class="row-even"><td><a class="reference internal" href="#mxnet.autograd.predict_mode" title="mxnet.autograd.predict_mode"><code class="xref py py-obj docutils literal"><span class="pre">predict_mode</span></code></a></td>
<td>Returns a scope context to be used in a ‘with’ statement in which forward pass behavior is set to inference mode, without changing the recording states.</td>
</tr>
<tr class="row-odd"><td><a class="reference internal" href="#mxnet.autograd.backward" title="mxnet.autograd.backward"><code class="xref py py-obj docutils literal"><span class="pre">backward</span></code></a></td>
<td>Compute the gradients of heads w.r.t. previously marked variables.</td>
</tr>
<tr class="row-even"><td><a class="reference internal" href="#mxnet.autograd.set_training" title="mxnet.autograd.set_training"><code class="xref py py-obj docutils literal"><span class="pre">set_training</span></code></a></td>
<td>Set status to training/predicting.</td>
</tr>
<tr class="row-odd"><td><a class="reference internal" href="#mxnet.autograd.is_training" title="mxnet.autograd.is_training"><code class="xref py py-obj docutils literal"><span class="pre">is_training</span></code></a></td>
<td>Get status on training/predicting.</td>
</tr>
<tr class="row-even"><td><a class="reference internal" href="#mxnet.autograd.set_recording" title="mxnet.autograd.set_recording"><code class="xref py py-obj docutils literal"><span class="pre">set_recording</span></code></a></td>
<td>Set status to recording/not recording.</td>
</tr>
<tr class="row-odd"><td><a class="reference internal" href="#mxnet.autograd.is_recording" title="mxnet.autograd.is_recording"><code class="xref py py-obj docutils literal"><span class="pre">is_recording</span></code></a></td>
<td>Get status on recording/not recording.</td>
</tr>
<tr class="row-even"><td><a class="reference internal" href="#mxnet.autograd.mark_variables" title="mxnet.autograd.mark_variables"><code class="xref py py-obj docutils literal"><span class="pre">mark_variables</span></code></a></td>
<td>Mark NDArrays as variables to compute gradient for autograd.</td>
</tr>
<tr class="row-odd"><td><a class="reference internal" href="#mxnet.autograd.Function" title="mxnet.autograd.Function"><code class="xref py py-obj docutils literal"><span class="pre">Function</span></code></a></td>
<td>User-defined differentiable function.</td>
</tr>
</tbody>
</table>
</div>
<div class="section" id="api-reference">
<span id="api-reference"></span><h2>API Reference<a class="headerlink" href="#api-reference" title="Permalink to this headline"></a></h2>
<script src="../../_static/js/auto_module_index.js" type="text/javascript"></script><span class="target" id="module-mxnet.autograd"></span><p>Autograd for NDArray.</p>
<dl class="function">
<dt id="mxnet.autograd.set_recording">
<code class="descclassname">mxnet.autograd.</code><code class="descname">set_recording</code><span class="sig-paren">(</span><em>is_recording</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.set_recording" title="Permalink to this definition"></a></dt>
<dd><p>Set status to recording/not recording. When recording, a graph will be constructed
for gradient computation.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><strong>is_recording</strong> (<em>bool</em>) – </td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"></td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body">previous state before this set.</td>
</tr>
</tbody>
</table>
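<p>A minimal usage sketch (illustrative, not from the original docstring); the returned value is the
previous recording state, which can be used to restore it afterwards:</p>
<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx

prev = mx.autograd.set_recording(True)   # returns the previous state
print(mx.autograd.is_recording())        # True
# ... build a graph to be differentiated here ...
mx.autograd.set_recording(prev)          # restore the previous state
</pre></div>
</div>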
</dd></dl>
<dl class="function">
<dt id="mxnet.autograd.set_training">
<code class="descclassname">mxnet.autograd.</code><code class="descname">set_training</code><span class="sig-paren">(</span><em>train_mode</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.set_training" title="Permalink to this definition"></a></dt>
<dd><p>Set status to training/predicting. This affects ctx.is_train in the operator
running context. For example, Dropout will drop inputs randomly when
train_mode=True, and simply pass them through when train_mode=False.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><strong>train_mode</strong> (<em>bool</em>) – </td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"></td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body">previous state before this set.</td>
</tr>
</tbody>
</table>
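<p>A minimal usage sketch (illustrative, not from the original docstring); like
<code class="docutils literal"><span class="pre">set_recording</span></code>, it returns the previous state:</p>
<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx

prev = mx.autograd.set_training(True)    # returns the previous state
print(mx.autograd.is_training())         # True
mx.autograd.set_training(prev)           # restore the previous mode
</pre></div>
</div>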
</dd></dl>
<dl class="function">
<dt id="mxnet.autograd.is_recording">
<code class="descclassname">mxnet.autograd.</code><code class="descname">is_recording</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.is_recording" title="Permalink to this definition"></a></dt>
<dd><p>Get status on recording/not recording.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body"></td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">Current state of recording.</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="function">
<dt id="mxnet.autograd.is_training">
<code class="descclassname">mxnet.autograd.</code><code class="descname">is_training</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.is_training" title="Permalink to this definition"></a></dt>
<dd><p>Get status on training/predicting.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Returns:</th><td class="field-body"></td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body">Current state of training/predicting.</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="function">
<dt id="mxnet.autograd.record">
<code class="descclassname">mxnet.autograd.</code><code class="descname">record</code><span class="sig-paren">(</span><em>train_mode=True</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.record" title="Permalink to this definition"></a></dt>
<dd><p>Returns an autograd recording scope context to be used in a ‘with’ statement
and captures code that needs gradients to be calculated.</p>
<div class="admonition note">
<p class="first admonition-title">Note</p>
<p class="last">When forwarding with train_mode=False, the corresponding backward
should also use train_mode=False, otherwise gradient is undefined.</p>
</div>
<p>Example:</p>
<div class="highlight-python"><div class="highlight"><pre><span></span><span class="k">with</span> <span class="n">autograd</span><span class="o">.</span><span class="n">record</span><span class="p">():</span>
<span class="n">y</span> <span class="o">=</span> <span class="n">model</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="n">backward</span><span class="p">([</span><span class="n">y</span><span class="p">])</span>
<span class="n">metric</span><span class="o">.</span><span class="n">update</span><span class="p">(</span><span class="o">...</span><span class="p">)</span>
<span class="n">optim</span><span class="o">.</span><span class="n">step</span><span class="p">(</span><span class="o">...</span><span class="p">)</span>
</pre></div>
</div>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><strong>train_mode</strong> (<em>bool, default True</em>) – Whether the forward pass is in training or predicting mode. This controls the behavior
of some layers such as Dropout, BatchNorm.</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="function">
<dt id="mxnet.autograd.pause">
<code class="descclassname">mxnet.autograd.</code><code class="descname">pause</code><span class="sig-paren">(</span><em>train_mode=False</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.pause" title="Permalink to this definition"></a></dt>
<dd><p>Returns a scope context to be used in a ‘with’ statement for code that does not need
gradients to be calculated.</p>
<p>Example:</p>
<div class="highlight-python"><div class="highlight"><pre><span></span>with autograd.record():
    y = model(x)
    backward([y])
    with autograd.pause():
        # testing, IO, gradient updates...
</pre></div>
</div>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><strong>train_mode</strong> (<em>bool, default False</em>) – Whether to do forward for training or predicting.</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="function">
<dt id="mxnet.autograd.train_mode">
<code class="descclassname">mxnet.autograd.</code><code class="descname">train_mode</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.train_mode" title="Permalink to this definition"></a></dt>
<dd><p>Returns a scope context to be used in a ‘with’ statement
in which forward pass behavior is set to training mode,
without changing the recording states.</p>
<p>Example:</p>
<div class="highlight-python"><div class="highlight"><pre><span></span><span class="n">y</span> <span class="o">=</span> <span class="n">model</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="k">with</span> <span class="n">autograd</span><span class="o">.</span><span class="n">train_mode</span><span class="p">():</span>
<span class="n">y</span> <span class="o">=</span> <span class="n">dropout</span><span class="p">(</span><span class="n">y</span><span class="p">)</span>
</pre></div>
</div>
</dd></dl>
<dl class="function">
<dt id="mxnet.autograd.predict_mode">
<code class="descclassname">mxnet.autograd.</code><code class="descname">predict_mode</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.predict_mode" title="Permalink to this definition"></a></dt>
<dd><p>Returns a scope context to be used in a ‘with’ statement
in which forward pass behavior is set to inference mode,
without changing the recording states.</p>
<p>Example:</p>
<div class="highlight-python"><div class="highlight"><pre><span></span><span class="k">with</span> <span class="n">autograd</span><span class="o">.</span><span class="n">record</span><span class="p">():</span>
<span class="n">y</span> <span class="o">=</span> <span class="n">model</span><span class="p">(</span><span class="n">x</span><span class="p">)</span>
<span class="k">with</span> <span class="n">autograd</span><span class="o">.</span><span class="n">predict_mode</span><span class="p">():</span>
<span class="n">y</span> <span class="o">=</span> <span class="n">sampling</span><span class="p">(</span><span class="n">y</span><span class="p">)</span>
<span class="n">backward</span><span class="p">([</span><span class="n">y</span><span class="p">])</span>
</pre></div>
</div>
</dd></dl>
<dl class="function">
<dt id="mxnet.autograd.mark_variables">
<code class="descclassname">mxnet.autograd.</code><code class="descname">mark_variables</code><span class="sig-paren">(</span><em>variables</em>, <em>gradients</em>, <em>grad_reqs='write'</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.mark_variables" title="Permalink to this definition"></a></dt>
<dd><p>Mark NDArrays as variables to compute gradient for autograd.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first last simple">
<li><strong>variables</strong> (<em>NDArray or list of NDArray</em>) – </li>
<li><strong>gradients</strong> (<em>NDArray or list of NDArray</em>) – </li>
<li><strong>grad_reqs</strong> (<em>str or list of str</em>) – </li>
</ul>
</td>
</tr>
</tbody>
</table>
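<p>A hedged usage sketch (illustrative, not from the original docstring): mark_variables is the
lower-level equivalent of <code class="docutils literal"><span class="pre">attach_grad()</span></code>, where the gradient buffer is supplied explicitly:</p>
<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx

x = mx.nd.array([1., 2., 3.])
dx = mx.nd.zeros_like(x)                 # buffer that will receive the gradient
mx.autograd.mark_variables([x], [dx])    # grad_reqs defaults to 'write'
with mx.autograd.record():
    y = x * x
y.backward()
print(dx)                                # [ 2.  4.  6.]
</pre></div>
</div>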
</dd></dl>
<dl class="function">
<dt id="mxnet.autograd.backward">
<code class="descclassname">mxnet.autograd.</code><code class="descname">backward</code><span class="sig-paren">(</span><em>heads</em>, <em>head_grads=None</em>, <em>retain_graph=False</em>, <em>train_mode=True</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.backward" title="Permalink to this definition"></a></dt>
<dd><p>Compute the gradients of heads w.r.t. previously marked variables.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first last simple">
<li><strong>heads</strong> (<em>NDArray or list of NDArray</em>) – Output NDArray(s)</li>
<li><strong>head_grads</strong> (<em>NDArray or list of NDArray or None</em>) – Gradients with respect to heads.</li>
<li><strong>train_mode</strong> (<em>bool, optional</em>) – Whether to do backward for training or predicting.</li>
</ul>
</td>
</tr>
</tbody>
</table>
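<p>A brief illustrative sketch (values are assumptions, not from the original docs) of calling the
module-level backward on a list of heads:</p>
<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx

x = mx.nd.array([1., 2., 3.])
x.attach_grad()
with mx.autograd.record():
    y = 2 * x + 1
mx.autograd.backward([y])                # same as y.backward() for a single head
print(x.grad)                            # [ 2.  2.  2.]
</pre></div>
</div>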
</dd></dl>
<dl class="function">
<dt id="mxnet.autograd.grad">
<code class="descclassname">mxnet.autograd.</code><code class="descname">grad</code><span class="sig-paren">(</span><em>heads</em>, <em>variables</em>, <em>head_grads=None</em>, <em>retain_graph=None</em>, <em>create_graph=False</em>, <em>train_mode=True</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.grad" title="Permalink to this definition"></a></dt>
<dd><p>Compute the gradients of heads w.r.t. variables. Gradients will be
returned as new NDArrays instead of being stored into <cite>variable.grad</cite>.
Supports recording the gradient graph for computing higher order gradients.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>heads</strong> (<em>NDArray or list of NDArray</em>) – Output NDArray(s)</li>
<li><strong>variables</strong> (<em>NDArray or list of NDArray</em>) – Input variables to compute gradients for.</li>
<li><strong>head_grads</strong> (<em>NDArray or list of NDArray or None</em>) – Gradients with respect to heads.</li>
<li><strong>retain_graph</strong> (<em>bool</em>) – Whether to keep the computation graph for differentiating again, instead
of clearing the history and releasing memory. Defaults to the same value
as create_graph.</li>
<li><strong>create_graph</strong> (<em>bool</em>) – Whether to record the gradient graph for computing higher order gradients.</li>
<li><strong>train_mode</strong> (<em>bool, optional</em>) – Whether to do backward for training or prediction.</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">Gradients with respect to variables.</p>
</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last">NDArray or list of NDArray</p>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Examples</p>
<div class="highlight-python"><div class="highlight"><pre><span></span><span class="gp">>>> </span><span class="n">x</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">ones</span><span class="p">((</span><span class="mi">1</span><span class="p">,))</span>
<span class="gp">>>> </span><span class="n">x</span><span class="o">.</span><span class="n">attach_grad</span><span class="p">()</span>
<span class="gp">>>> </span><span class="k">with</span> <span class="n">mx</span><span class="o">.</span><span class="n">autograd</span><span class="o">.</span><span class="n">record</span><span class="p">():</span>
<span class="gp">... </span> <span class="n">z</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">elemwise_add</span><span class="p">(</span><span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">exp</span><span class="p">(</span><span class="n">x</span><span class="p">),</span> <span class="n">x</span><span class="p">)</span>
<span class="gp">>>> </span><span class="n">dx</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">autograd</span><span class="o">.</span><span class="n">grad</span><span class="p">(</span><span class="n">z</span><span class="p">,</span> <span class="p">[</span><span class="n">x</span><span class="p">],</span> <span class="n">create_graph</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
<span class="gp">>>> </span><span class="n">dx</span><span class="o">.</span><span class="n">backward</span><span class="p">()</span>
<span class="gp">>>> </span><span class="k">print</span><span class="p">(</span><span class="n">dx</span><span class="o">.</span><span class="n">grad</span><span class="p">)</span>
<span class="go">[</span>
<span class="go">[ 3.71828175]</span>
<span class="go"><ndarray 1="" @cpu(0)="">]</ndarray></span>
</pre></div>
</div>
</dd></dl>
<dl class="function">
<dt id="mxnet.autograd.get_symbol">
<code class="descclassname">mxnet.autograd.</code><code class="descname">get_symbol</code><span class="sig-paren">(</span><em>x</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.get_symbol" title="Permalink to this definition"></a></dt>
<dd><p>Retrieve recorded computation history as <cite>Symbol</cite>.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name"/>
<col class="field-body"/>
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><strong>x</strong> (<a class="reference internal" href="../ndarray/ndarray.html#mxnet.ndarray.NDArray" title="mxnet.ndarray.NDArray"><em>NDArray</em></a>) – Array representing the head of computation graph.</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body">The retrieved Symbol.</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><a class="reference internal" href="../symbol/symbol.html#mxnet.symbol.Symbol" title="mxnet.symbol.Symbol">Symbol</a></td>
</tr>
</tbody>
</table>
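<p>A hedged usage sketch (illustrative; it assumes the head NDArray was produced inside a
<code class="docutils literal"><span class="pre">record()</span></code> block):</p>
<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx

x = mx.nd.array([1., 2., 3.])
x.attach_grad()
with mx.autograd.record():
    y = x * x + 1
sym = mx.autograd.get_symbol(y)          # Symbol describing the recorded computation
print(sym.list_arguments())
</pre></div>
</div>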
</dd></dl>
<dl class="class">
<dt id="mxnet.autograd.Function">
<em class="property">class </em><code class="descclassname">mxnet.autograd.</code><code class="descname">Function</code><a class="headerlink" href="#mxnet.autograd.Function" title="Permalink to this definition"></a></dt>
<dd><p>User-defined differentiable function.</p>
<p>Function allows defining both forward and backward computation for
custom operators. During gradient computation, the user-defined
backward function will be used instead of the default chain rule.
You can also cast to a numpy array and back for some operations in
forward and backward.</p>
<p>For example, a stable sigmoid function can be defined as:</p>
<div class="highlight-python"><div class="highlight"><pre><span></span><span class="k">class</span> <span class="nc">sigmoid</span><span class="p">(</span><span class="n">Function</span><span class="p">):</span>
<span class="k">def</span> <span class="nf">forward</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">x</span><span class="p">):</span>
<span class="n">y</span> <span class="o">=</span> <span class="mi">1</span> <span class="o">/</span> <span class="p">(</span><span class="mi">1</span> <span class="o">+</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">exp</span><span class="p">(</span><span class="o">-</span><span class="n">x</span><span class="p">))</span>
<span class="bp">self</span><span class="o">.</span><span class="n">save_for_backward</span><span class="p">(</span><span class="n">y</span><span class="p">)</span>
<span class="k">return</span> <span class="n">y</span>
<span class="k">def</span> <span class="nf">backward</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">dy</span><span class="p">):</span>
<span class="c1"># backward takes as many inputs as forward's return value,</span>
<span class="c1"># and returns as many NDArrays as forward's arguments.</span>
<span class="n">y</span><span class="p">,</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">saved_tensors</span>
<span class="k">return</span> <span class="n">y</span> <span class="o">*</span> <span class="p">(</span><span class="mi">1</span><span class="o">-</span><span class="n">y</span><span class="p">)</span>
</pre></div>
</div>
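<p>An illustrative usage sketch (not from the original docs) for the sigmoid Function defined above;
instances of a Function subclass are called like ordinary functions inside a <code class="docutils literal"><span class="pre">record()</span></code> block:</p>
<div class="highlight-python"><div class="highlight"><pre><span></span>import mxnet as mx
# assumes the sigmoid(Function) class defined above

func = sigmoid()
x = mx.nd.array([0., 1., 2.])
x.attach_grad()
with mx.autograd.record():
    y = func(x)                          # runs forward(); the custom backward() is recorded
y.backward()
print(x.grad)                            # sigmoid(x) * (1 - sigmoid(x))
</pre></div>
</div>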
<dl class="method">
<dt id="mxnet.autograd.Function.forward">
<code class="descname">forward</code><span class="sig-paren">(</span><em>*inputs</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.Function.forward" title="Permalink to this definition"></a></dt>
<dd><p>Forward computation.</p>
</dd></dl>
<dl class="method">
<dt id="mxnet.autograd.Function.backward">
<code class="descname">backward</code><span class="sig-paren">(</span><em>*output_grads</em><span class="sig-paren">)</span><a class="headerlink" href="#mxnet.autograd.Function.backward" title="Permalink to this definition"></a></dt>
<dd><p>Backward computation.</p>
<p>Takes as many inputs as forward’s outputs,
and returns as many NDArrays as forward’s inputs.</p>
</dd></dl>
</dd></dl>
<script>auto_index("api-reference");</script></div>
</div>
</div>
</div>
<div aria-label="main navigation" class="sphinxsidebar rightsidebar" role="navigation">
<div class="sphinxsidebarwrapper">
<h3><a href="../../../index.html">Table Of Contents</a></h3>
<ul>
<li><a class="reference internal" href="#">Autograd Package</a><ul>
<li><a class="reference internal" href="#overview">Overview</a><ul>
<li><a class="reference internal" href="#record-vs-pause">Record vs Pause</a></li>
</ul>
</li>
<li><a class="reference internal" href="#train-mode-and-predict-mode">Train mode and Predict Mode</a></li>
<li><a class="reference internal" href="#autograd">Autograd</a></li>
<li><a class="reference internal" href="#api-reference">API Reference</a></li>
</ul>
</li>
</ul>
</div>
</div>
</div><div class="footer">
<div class="section-disclaimer">
<div class="container">
<div>
<img height="60" src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/apache_incubator_logo.png"/>
<p>
Apache MXNet is an effort undergoing incubation at The Apache Software Foundation (ASF), <strong>sponsored by the <i>Apache Incubator</i></strong>. Incubation is required of all newly accepted projects until a further review indicates that the infrastructure, communications, and decision making process have stabilized in a manner consistent with other successful ASF projects. While incubation status is not necessarily a reflection of the completeness or stability of the code, it does indicate that the project has yet to be fully endorsed by the ASF.
</p>
<p>
"Copyright © 2017, The Apache Software Foundation
Apache MXNet, MXNet, Apache, the Apache feather, and the Apache MXNet project logo are either registered trademarks or trademarks of the Apache Software Foundation."
</p>
</div>
</div>
</div>
</div> <!-- pagename != index -->
</div>
<script crossorigin="anonymous" integrity="sha384-0mSbJDEHialfmuBBQP6A4Qrprq5OVfW37PRR3j5ELqxss1yVqOtnepnHVP9aJ7xS" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/js/bootstrap.min.js"></script>
<script src="../../../_static/js/sidebar.js" type="text/javascript"></script>
<script src="../../../_static/js/search.js" type="text/javascript"></script>
<script src="../../../_static/js/navbar.js" type="text/javascript"></script>
<script src="../../../_static/js/clipboard.min.js" type="text/javascript"></script>
<script src="../../../_static/js/copycode.js" type="text/javascript"></script>
<script src="../../../_static/js/page.js" type="text/javascript"></script>
<script type="text/javascript">
$('body').ready(function () {
$('body').css('visibility', 'visible');
});
</script>
</body>
</html>