Merge pull request #66 from ys2843/yang-patch3

sync beta website with the latest mxnet website
diff --git a/404.html b/404.html
index 4fa192d..6d75419 100644
--- a/404.html
+++ b/404.html
@@ -5,23 +5,23 @@
   <meta name="viewport" content="width=device-width, initial-scale=1">
   <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"><!-- Begin Jekyll SEO tag v2.6.1 -->
 <title>Apache MXNet | A flexible and efficient library for deep learning.</title>
-<meta name="generator" content="Jekyll v3.8.6" />
+<meta name="generator" content="Jekyll v4.0.0" />
 <meta property="og:title" content="Apache MXNet" />
 <meta property="og:locale" content="en_US" />
 <meta name="description" content="A flexible and efficient library for deep learning." />
 <meta property="og:description" content="A flexible and efficient library for deep learning." />
-<link rel="canonical" href="https://mxnet.incubator.apache.org/404.html" />
-<meta property="og:url" content="https://mxnet.incubator.apache.org/404.html" />
+<link rel="canonical" href="https://mxnet-beta.staged.apache.org//404.html" />
+<meta property="og:url" content="https://mxnet-beta.staged.apache.org//404.html" />
 <meta property="og:site_name" content="Apache MXNet" />
 <script type="application/ld+json">
-{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Apache MXNet","url":"https://mxnet.incubator.apache.org/404.html","@context":"https://schema.org"}</script>
+{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Apache MXNet","url":"https://mxnet-beta.staged.apache.org//404.html","@context":"https://schema.org"}</script>
 <!-- End Jekyll SEO tag -->
 <script src="https://medium-widget.pixelpoint.io/widget.js"></script>
-  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet.incubator.apache.org/feed.xml" title="Apache MXNet" /><script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
-
-  <script src="/assets/js/clipboard.js"></script>
-  <script src="/assets/js/copycode.js"></script>
-</head>
+  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.css" />
+  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet-beta.staged.apache.org//feed.xml" title="Apache MXNet" /><script src="/assets/js/jquery-3.3.1.min.js"></script><script src="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.js" defer></script>
+  <script src="/assets/js/globalSearch.js" defer></script>
+  <script src="/assets/js/clipboard.js" defer></script>
+  <script src="/assets/js/copycode.js" defer></script></head>
 <body><header class="site-header" role="banner">
 
   <script>
@@ -59,14 +59,173 @@
             </svg>
           </span>
       </label>
-
+      <div class="gs-search-border">
+        <div id="gs-search-icon"></div>
+        <form id="global-search-form">
+          <input id="global-search" type="text" title="Search" placeholder="Search" />
+          <div id="global-search-dropdown-container">
+            <button class="gs-current-version btn" type="button" data-toggle="dropdown">
+                <span id="gs-current-version-label">master</span>
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+          <span id="global-search-close">x</span>
+        </form>
+      </div>
       <div class="trigger">
+        <div id="global-search-mobile-border">
+          <div id="gs-search-icon-mobile"></div>
+          <input id="global-search-mobile" placeholder="Search..." type="text"/>
+          <div id="global-search-dropdown-container-mobile">
+            <button class="gs-current-version-mobile btn" type="button" data-toggle="dropdown">
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown-mobile">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+        </div>
         <a class="page-link" href="/get_started">Get Started</a>
         <a class="page-link" href="/blog">Blog</a>
         <a class="page-link" href="/features">Features</a>
         <a class="page-link" href="/ecosystem">Ecosystem</a>
         <a class="page-link" href="/api">Docs & Tutorials</a>
         <a class="page-link" href="https://github.com/apache/incubator-mxnet">GitHub</a>
+        <div class="dropdown">
+          <span class="dropdown-header">master
+            <svg class="dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
+          </span>
+          <div class="dropdown-content">
+            
+              
+                <a class="dropdown-option-active" href="/">master</a>
+              
+            
+              
+                <a href="/versions/1.6/">1.6</a>
+              
+            
+              
+                <a href="/versions/1.5.0/">1.5.0</a>
+              
+            
+              
+                <a href="/versions/1.4.1/">1.4.1</a>
+              
+            
+              
+                <a href="/versions/1.3.1/">1.3.1</a>
+              
+            
+              
+                <a href="/versions/1.2.1/">1.2.1</a>
+              
+            
+              
+                <a href="/versions/1.1.0/">1.1.0</a>
+              
+            
+              
+                <a href="/versions/1.0.0/">1.0.0</a>
+              
+            
+              
+                <a href="/versions/0.12.1/">0.12.1</a>
+              
+            
+              
+                <a href="/versions/0.11.0/">0.11.0</a>
+              
+            
+          </div>
+        </div>
       </div>
     </nav>
   </div>
@@ -89,8 +248,14 @@
 <div class="container">
   <h1>404</h1>
 
-  <p><strong>Page not found :(</strong></p>
-  <p>The requested page could not be found.</p>
+  <p><strong>Page not found :'(</strong></p>
+
+  <p><b>Due to a recent redesign of the website, some items have moved.</b>
+    We're working towards adding redirects. Useful links:</p>
+  <ul style="background-color:white;">
+    <li><a href="/api/python/docs/tutorials/">Python Tutorials</a></li>
+    <li><a href="/api/python/docs/api/">Python API Documentation</a></li>
+  </ul>
 </div>
 
 </main><footer class="site-footer h-card">
@@ -99,8 +264,7 @@
             <div class="col-4">
                 <h4 class="footer-category-title">Resources</h4>
                 <ul class="contact-list">
-                    <li><a class="u-email" href="mailto:dev@mxnet.apache.org">Dev list</a></li>
-                    <li><a class="u-email" href="mailto:user@mxnet.apache.org">User mailing list</a></li>
+                    <li><a href="/community/contribute#mxnet-dev-communications">Mailing lists</a></li>
                     <li><a href="https://cwiki.apache.org/confluence/display/MXNET/Apache+MXNet+Home">Developer Wiki</a></li>
                     <li><a href="https://issues.apache.org/jira/projects/MXNET/issues">Jira Tracker</a></li>
                     <li><a href="https://github.com/apache/incubator-mxnet/labels/Roadmap">Github Roadmap</a></li>
diff --git a/api/architecture/exception_handling.html b/api/architecture/exception_handling.html
index 4227978..521d56b 100644
--- a/api/architecture/exception_handling.html
+++ b/api/architecture/exception_handling.html
@@ -5,23 +5,23 @@
   <meta name="viewport" content="width=device-width, initial-scale=1">
   <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"><!-- Begin Jekyll SEO tag v2.6.1 -->
 <title>Exception Handling in MXNet | Apache MXNet</title>
-<meta name="generator" content="Jekyll v3.8.6" />
+<meta name="generator" content="Jekyll v4.0.0" />
 <meta property="og:title" content="Exception Handling in MXNet" />
 <meta property="og:locale" content="en_US" />
 <meta name="description" content="A flexible and efficient library for deep learning." />
 <meta property="og:description" content="A flexible and efficient library for deep learning." />
-<link rel="canonical" href="https://mxnet.incubator.apache.org/api/architecture/exception_handling" />
-<meta property="og:url" content="https://mxnet.incubator.apache.org/api/architecture/exception_handling" />
+<link rel="canonical" href="https://mxnet-beta.staged.apache.org//api/architecture/exception_handling" />
+<meta property="og:url" content="https://mxnet-beta.staged.apache.org//api/architecture/exception_handling" />
 <meta property="og:site_name" content="Apache MXNet" />
 <script type="application/ld+json">
-{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Exception Handling in MXNet","url":"https://mxnet.incubator.apache.org/api/architecture/exception_handling","@context":"https://schema.org"}</script>
+{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Exception Handling in MXNet","url":"https://mxnet-beta.staged.apache.org//api/architecture/exception_handling","@context":"https://schema.org"}</script>
 <!-- End Jekyll SEO tag -->
 <script src="https://medium-widget.pixelpoint.io/widget.js"></script>
-  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet.incubator.apache.org/feed.xml" title="Apache MXNet" /><script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
-
-  <script src="/assets/js/clipboard.js"></script>
-  <script src="/assets/js/copycode.js"></script>
-</head>
+  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.css" />
+  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet-beta.staged.apache.org//feed.xml" title="Apache MXNet" /><script src="/assets/js/jquery-3.3.1.min.js"></script><script src="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.js" defer></script>
+  <script src="/assets/js/globalSearch.js" defer></script>
+  <script src="/assets/js/clipboard.js" defer></script>
+  <script src="/assets/js/copycode.js" defer></script></head>
 <body><header class="site-header" role="banner">
 
   <script>
@@ -59,14 +59,173 @@
             </svg>
           </span>
       </label>
-
+      <div class="gs-search-border">
+        <div id="gs-search-icon"></div>
+        <form id="global-search-form">
+          <input id="global-search" type="text" title="Search" placeholder="Search" />
+          <div id="global-search-dropdown-container">
+            <button class="gs-current-version btn" type="button" data-toggle="dropdown">
+                <span id="gs-current-version-label">master</span>
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+          <span id="global-search-close">x</span>
+        </form>
+      </div>
       <div class="trigger">
+        <div id="global-search-mobile-border">
+          <div id="gs-search-icon-mobile"></div>
+          <input id="global-search-mobile" placeholder="Search..." type="text"/>
+          <div id="global-search-dropdown-container-mobile">
+            <button class="gs-current-version-mobile btn" type="button" data-toggle="dropdown">
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown-mobile">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+        </div>
         <a class="page-link" href="/get_started">Get Started</a>
         <a class="page-link" href="/blog">Blog</a>
         <a class="page-link" href="/features">Features</a>
         <a class="page-link" href="/ecosystem">Ecosystem</a>
         <a class="page-link" href="/api">Docs & Tutorials</a>
         <a class="page-link" href="https://github.com/apache/incubator-mxnet">GitHub</a>
+        <div class="dropdown">
+          <span class="dropdown-header">master
+            <svg class="dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
+          </span>
+          <div class="dropdown-content">
+            
+              
+                <a class="dropdown-option-active" href="/">master</a>
+              
+            
+              
+                <a href="/versions/1.6/">1.6</a>
+              
+            
+              
+                <a href="/versions/1.5.0/">1.5.0</a>
+              
+            
+              
+                <a href="/versions/1.4.1/">1.4.1</a>
+              
+            
+              
+                <a href="/versions/1.3.1/">1.3.1</a>
+              
+            
+              
+                <a href="/versions/1.2.1/">1.2.1</a>
+              
+            
+              
+                <a href="/versions/1.1.0/">1.1.0</a>
+              
+            
+              
+                <a href="/versions/1.0.0/">1.0.0</a>
+              
+            
+              
+                <a href="/versions/0.12.1/">0.12.1</a>
+              
+            
+              
+                <a href="/versions/0.11.0/">0.11.0</a>
+              
+            
+          </div>
+        </div>
       </div>
     </nav>
   </div>
@@ -128,8 +287,6 @@
             
               <!-- page-category -->
             
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/exception_handling">Exception Handling in MXNet</a></li>
               <!-- page-category -->
@@ -210,22 +367,6 @@
             
               <!-- page-category -->
             
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/note_data_loading">Efficient Data Loaders</a></li>
               <!-- page-category -->
@@ -238,14 +379,14 @@
             <li><a href="/api/architecture/note_memory">Memory Consumption</a></li>
               <!-- page-category -->
             
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/overview">MXNet System Architecture</a></li>
               <!-- page-category -->
             
               <!-- page-category -->
             
+              <!-- page-category -->
+            
             
             <li><a href="/api/architecture/program_model">Deep Learning Programming Paradigm</a></li>
               <!-- page-category -->
@@ -279,44 +420,25 @@
               <!-- page-category -->
             
               <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
                <!-- resource-p -->
         </ul>
     </div>
     <div class="col-9">
         <!--- Licensed to the Apache Software Foundation (ASF) under one -->
-
 <!--- or more contributor license agreements.  See the NOTICE file -->
-
 <!--- distributed with this work for additional information -->
-
 <!--- regarding copyright ownership.  The ASF licenses this file -->
-
 <!--- to you under the Apache License, Version 2.0 (the -->
-
 <!--- "License"); you may not use this file except in compliance -->
-
 <!--- with the License.  You may obtain a copy of the License at -->
 
 <!---   http://www.apache.org/licenses/LICENSE-2.0 -->
 
 <!--- Unless required by applicable law or agreed to in writing, -->
-
 <!--- software distributed under the License is distributed on an -->
-
 <!--- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -->
-
 <!--- KIND, either express or implied.  See the License for the -->
-
 <!--- specific language governing permissions and limitations -->
-
 <!--- under the License. -->
 
 <h1 id="exception-handling-in-mxnet">Exception Handling in MXNet</h1>
@@ -326,11 +448,16 @@
 Although, the examples are in Python, they can be easily extended to MXNet
 language bindings.</p>
 
-<p>MXNet exceptions can be thrown from two areas:
-- MXNet main thread. For eg. Infershape and InferType.
-- Spawned threads:
-    * By dependency engine for operator execution in parallel
-    * By the iterators, during the data loading, text parsing phase etc.</p>
+<p>MXNet exceptions can be thrown from two areas:</p>
+<ul>
+  <li>MXNet main thread, e.g. Infershape and InferType.</li>
+  <li>Spawned threads:
+    <ul>
+      <li>By dependency engine for operator execution in parallel</li>
+      <li>By the iterators, during the data loading, text parsing phase etc.</li>
+    </ul>
+  </li>
+</ul>
 
 <p>In the first case, the exception is thrown and can be handled in the main thread.
 In the second case, the exception is thrown in a spawned thread, caught and transported to the
@@ -339,8 +466,10 @@
 
 <h2 id="prerequisites">Prerequisites</h2>
 
-<p>To complete this tutorial, we need:
-- MXNet <a href="https://github.com/apache/incubator-mxnet/commit/7b24137ed45df605defa4ce72ec91554f6e445f0">7b24137</a>. See Instructions in <a href="http://mxnet.io/install/index.html">Setup and Installation</a>.</p>
+<p>To complete this tutorial, we need:</p>
+<ul>
+  <li>MXNet <a href="https://github.com/apache/incubator-mxnet/commit/7b24137ed45df605defa4ce72ec91554f6e445f0">7b24137</a>. See Instructions in <a href="https://mxnet.io/get_started">Setup and Installation</a>.</li>
+</ul>
 
 <h2 id="exception-handling-for-iterators">Exception Handling for Iterators</h2>
 
@@ -354,10 +483,11 @@
 producer thread during the prefetching, when the label is not found corresponding to a specific sample.</p>
 
 <p>The exception is transported to the main thread, where it is rethrown when Next is
-called as part of the following line: <code>for batch in iter(data_train)</code>.</p>
+called as part of the following line: <code class="highlighter-rouge">for batch in iter(data_train)</code>.</p>
 
-<p>In general, Exception may be rethrown as part of <code>Next</code> and <code>BeforeFirst</code> calls which correspond to <code>reset()</code> and <code>next()</code> methods in <code>MXDataIter</code> for Python language bindings.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python"><span class="kn">import</span> <span class="nn">os</span>
+<p>In general, Exception may be rethrown as part of <code class="highlighter-rouge">Next</code> and <code class="highlighter-rouge">BeforeFirst</code> calls which correspond to <code class="highlighter-rouge">reset()</code> and <code class="highlighter-rouge">next()</code> methods in <code class="highlighter-rouge">MXDataIter</code> for Python language bindings.</p>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code><span class="kn">import</span> <span class="nn">os</span>
 <span class="kn">import</span> <span class="nn">mxnet</span> <span class="k">as</span> <span class="n">mx</span>
 
 <span class="n">cwd</span> <span class="o">=</span> <span class="n">os</span><span class="o">.</span><span class="n">getcwd</span><span class="p">()</span>
@@ -381,33 +511,37 @@
 <span class="k">except</span> <span class="n">mx</span><span class="o">.</span><span class="n">base</span><span class="o">.</span><span class="n">MXNetError</span> <span class="k">as</span> <span class="n">ex</span><span class="p">:</span>
     <span class="k">print</span><span class="p">(</span><span class="s">"Exception handled"</span><span class="p">)</span>
     <span class="k">print</span><span class="p">(</span><span class="n">ex</span><span class="p">)</span>
-</code></pre></div>
+</code></pre></div></div>
+
 <h3 id="limitation">Limitation</h3>
 
-<p>There is a race condition when your last <code>next()</code> call doesnt reach the batch in your dataset where exception occurs. Exception may or may not be thrown in this case depending on which thread wins the race. To avoid this situation, you should try and iterate through your full dataset if you think it can throw exceptions which need to be handled.</p>
+<p>There is a race condition when your last <code class="highlighter-rouge">next()</code> call doesn't reach the batch in your dataset where exception occurs. Exception may or may not be thrown in this case depending on which thread wins the race. To avoid this situation, you should try and iterate through your full dataset if you think it can throw exceptions which need to be handled.</p>
 
 <h2 id="exception-handling-for-operators">Exception Handling for Operators</h2>
 
 <p>The below example shows how to handle exceptions for operators in the imperative mode.</p>
 
-<p>For the operator case, the dependency engine spawns a number of threads if it is running in the <code>ThreadedEnginePool</code> or <code>ThreadedEnginePerDevice</code> mode. The final operator is executed in one of the spawned threads.</p>
+<p>For the operator case, the dependency engine spawns a number of threads if it is running in the <code class="highlighter-rouge">ThreadedEnginePool</code> or <code class="highlighter-rouge">ThreadedEnginePerDevice</code> mode. The final operator is executed in one of the spawned threads.</p>
 
 <p>If an operator throws an exception during execution, this exception is propagated
 down the dependency chain. Once there is a synchronizing call i.e. WaitToRead for a variable in the dependency chain, the propagated exception is rethrown.</p>
 
 <p>In the below example, I illustrate how an exception that occured in the first line is propagated down the dependency chain, and finally is rethrown when we make a synchronizing call to WaitToRead.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python"><span class="kn">import</span> <span class="nn">mxnet</span> <span class="k">as</span> <span class="n">mx</span>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code><span class="kn">import</span> <span class="nn">mxnet</span> <span class="k">as</span> <span class="n">mx</span>
 <span class="n">a</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">normal</span><span class="p">(</span><span class="mi">0</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
 <span class="n">b</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">normal</span><span class="p">(</span><span class="mi">0</span><span class="p">,</span> <span class="mi">2</span><span class="p">,</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
 <span class="n">c</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">dot</span><span class="p">(</span><span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">)</span>
 <span class="n">d</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">normal</span><span class="p">(</span><span class="mi">0</span><span class="p">,</span> <span class="o">-</span><span class="mi">1</span><span class="p">,</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
 <span class="n">e</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">dot</span><span class="p">(</span><span class="n">c</span><span class="p">,</span> <span class="n">d</span><span class="p">)</span>
 <span class="n">e</span><span class="o">.</span><span class="n">wait_to_read</span><span class="p">()</span>
-</code></pre></div>
-<p>Although the above exception occurs when executing the operation which writes to the variable d in one of the child threads, it is thrown only when the synchronization happens as part of the line: <code>e.wait_to_read()</code>.</p>
+</code></pre></div></div>
 
-<p>Let us take another example. In the following case, we write to two variables and then <code>wait_to_read</code> for both. This example shows that any particular exception will not be thrown more than once.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python"><span class="kn">import</span> <span class="nn">mxnet</span> <span class="k">as</span> <span class="n">mx</span>
+<p>Although the above exception occurs when executing the operation which writes to the variable d in one of the child threads, it is thrown only when the synchronization happens as part of the line: <code class="highlighter-rouge">e.wait_to_read()</code>.</p>
+
+<p>Let us take another example. In the following case, we write to two variables and then <code class="highlighter-rouge">wait_to_read</code> for both. This example shows that any particular exception will not be thrown more than once.</p>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code><span class="kn">import</span> <span class="nn">mxnet</span> <span class="k">as</span> <span class="n">mx</span>
 <span class="n">a</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">normal</span><span class="p">(</span><span class="mi">0</span><span class="p">,</span> <span class="mi">1</span><span class="p">,</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
 <span class="n">b</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">normal</span><span class="p">(</span><span class="mi">0</span><span class="p">,</span> <span class="o">-</span><span class="mi">1</span><span class="p">,</span> <span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">2</span><span class="p">))</span>
 <span class="n">c</span><span class="p">,</span> <span class="n">d</span>  <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">nd</span><span class="o">.</span><span class="n">dot</span><span class="p">(</span><span class="n">a</span><span class="p">,</span> <span class="n">b</span><span class="p">)</span>
@@ -416,7 +550,8 @@
 <span class="k">except</span> <span class="n">mx</span><span class="o">.</span><span class="n">base</span><span class="o">.</span><span class="n">MXNetError</span> <span class="k">as</span> <span class="n">ex</span><span class="p">:</span>
     <span class="k">print</span><span class="p">(</span><span class="s">"Exception handled"</span><span class="p">)</span>
 <span class="n">d</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
-</code></pre></div>
+</code></pre></div></div>
+
     </div>
 </div>
 
@@ -431,8 +566,7 @@
             <div class="col-4">
                 <h4 class="footer-category-title">Resources</h4>
                 <ul class="contact-list">
-                    <li><a class="u-email" href="mailto:dev@mxnet.apache.org">Dev list</a></li>
-                    <li><a class="u-email" href="mailto:user@mxnet.apache.org">User mailing list</a></li>
+                    <li><a href="/community/contribute#mxnet-dev-communications">Mailing lists</a></li>
                     <li><a href="https://cwiki.apache.org/confluence/display/MXNET/Apache+MXNet+Home">Developer Wiki</a></li>
                     <li><a href="https://issues.apache.org/jira/projects/MXNET/issues">Jira Tracker</a></li>
                     <li><a href="https://github.com/apache/incubator-mxnet/labels/Roadmap">Github Roadmap</a></li>
diff --git a/api/architecture/note_data_loading.html b/api/architecture/note_data_loading.html
index 4d60856..4953eec 100644
--- a/api/architecture/note_data_loading.html
+++ b/api/architecture/note_data_loading.html
@@ -5,23 +5,23 @@
   <meta name="viewport" content="width=device-width, initial-scale=1">
   <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"><!-- Begin Jekyll SEO tag v2.6.1 -->
 <title>Efficient Data Loaders | Apache MXNet</title>
-<meta name="generator" content="Jekyll v3.8.6" />
+<meta name="generator" content="Jekyll v4.0.0" />
 <meta property="og:title" content="Efficient Data Loaders" />
 <meta property="og:locale" content="en_US" />
 <meta name="description" content="A flexible and efficient library for deep learning." />
 <meta property="og:description" content="A flexible and efficient library for deep learning." />
-<link rel="canonical" href="https://mxnet.incubator.apache.org/api/architecture/note_data_loading" />
-<meta property="og:url" content="https://mxnet.incubator.apache.org/api/architecture/note_data_loading" />
+<link rel="canonical" href="https://mxnet-beta.staged.apache.org//api/architecture/note_data_loading" />
+<meta property="og:url" content="https://mxnet-beta.staged.apache.org//api/architecture/note_data_loading" />
 <meta property="og:site_name" content="Apache MXNet" />
 <script type="application/ld+json">
-{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Efficient Data Loaders","url":"https://mxnet.incubator.apache.org/api/architecture/note_data_loading","@context":"https://schema.org"}</script>
+{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Efficient Data Loaders","url":"https://mxnet-beta.staged.apache.org//api/architecture/note_data_loading","@context":"https://schema.org"}</script>
 <!-- End Jekyll SEO tag -->
 <script src="https://medium-widget.pixelpoint.io/widget.js"></script>
-  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet.incubator.apache.org/feed.xml" title="Apache MXNet" /><script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
-
-  <script src="/assets/js/clipboard.js"></script>
-  <script src="/assets/js/copycode.js"></script>
-</head>
+  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.css" />
+  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet-beta.staged.apache.org//feed.xml" title="Apache MXNet" /><script src="/assets/js/jquery-3.3.1.min.js"></script><script src="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.js" defer></script>
+  <script src="/assets/js/globalSearch.js" defer></script>
+  <script src="/assets/js/clipboard.js" defer></script>
+  <script src="/assets/js/copycode.js" defer></script></head>
 <body><header class="site-header" role="banner">
 
   <script>
@@ -59,14 +59,173 @@
             </svg>
           </span>
       </label>
-
+      <div class="gs-search-border">
+        <div id="gs-search-icon"></div>
+        <form id="global-search-form">
+          <input id="global-search" type="text" title="Search" placeholder="Search" />
+          <div id="global-search-dropdown-container">
+            <button class="gs-current-version btn" type="button" data-toggle="dropdown">
+                <span id="gs-current-version-label">master</span>
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+          <span id="global-search-close">x</span>
+        </form>
+      </div>
       <div class="trigger">
+        <div id="global-search-mobile-border">
+          <div id="gs-search-icon-mobile"></div>
+          <input id="global-search-mobile" placeholder="Search..." type="text"/>
+          <div id="global-search-dropdown-container-mobile">
+            <button class="gs-current-version-mobile btn" type="button" data-toggle="dropdown">
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown-mobile">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+        </div>
         <a class="page-link" href="/get_started">Get Started</a>
         <a class="page-link" href="/blog">Blog</a>
         <a class="page-link" href="/features">Features</a>
         <a class="page-link" href="/ecosystem">Ecosystem</a>
         <a class="page-link" href="/api">Docs & Tutorials</a>
         <a class="page-link" href="https://github.com/apache/incubator-mxnet">GitHub</a>
+        <div class="dropdown">
+          <span class="dropdown-header">master
+            <svg class="dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
+          </span>
+          <div class="dropdown-content">
+            
+              
+                <a class="dropdown-option-active" href="/">master</a>
+              
+            
+              
+                <a href="/versions/1.6/">1.6</a>
+              
+            
+              
+                <a href="/versions/1.5.0/">1.5.0</a>
+              
+            
+              
+                <a href="/versions/1.4.1/">1.4.1</a>
+              
+            
+              
+                <a href="/versions/1.3.1/">1.3.1</a>
+              
+            
+              
+                <a href="/versions/1.2.1/">1.2.1</a>
+              
+            
+              
+                <a href="/versions/1.1.0/">1.1.0</a>
+              
+            
+              
+                <a href="/versions/1.0.0/">1.0.0</a>
+              
+            
+              
+                <a href="/versions/0.12.1/">0.12.1</a>
+              
+            
+              
+                <a href="/versions/0.11.0/">0.11.0</a>
+              
+            
+          </div>
+        </div>
       </div>
     </nav>
   </div>
@@ -128,8 +287,6 @@
             
               <!-- page-category -->
             
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/exception_handling">Exception Handling in MXNet</a></li>
               <!-- page-category -->
@@ -210,22 +367,6 @@
             
               <!-- page-category -->
             
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/note_data_loading">Efficient Data Loaders</a></li>
               <!-- page-category -->
@@ -238,14 +379,14 @@
             <li><a href="/api/architecture/note_memory">Memory Consumption</a></li>
               <!-- page-category -->
             
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/overview">MXNet System Architecture</a></li>
               <!-- page-category -->
             
               <!-- page-category -->
             
+              <!-- page-category -->
+            
             
             <li><a href="/api/architecture/program_model">Deep Learning Programming Paradigm</a></li>
               <!-- page-category -->
@@ -281,42 +422,25 @@
               <!-- page-category -->
             
               <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
                <!-- resource-p -->
         </ul>
     </div>
     <div class="col-9">
         <!--- Licensed to the Apache Software Foundation (ASF) under one -->
-
 <!--- or more contributor license agreements.  See the NOTICE file -->
-
 <!--- distributed with this work for additional information -->
-
 <!--- regarding copyright ownership.  The ASF licenses this file -->
-
 <!--- to you under the Apache License, Version 2.0 (the -->
-
 <!--- "License"); you may not use this file except in compliance -->
-
 <!--- with the License.  You may obtain a copy of the License at -->
 
 <!---   http://www.apache.org/licenses/LICENSE-2.0 -->
 
 <!--- Unless required by applicable law or agreed to in writing, -->
-
 <!--- software distributed under the License is distributed on an -->
-
 <!--- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -->
-
 <!--- KIND, either express or implied.  See the License for the -->
-
 <!--- specific language governing permissions and limitations -->
-
 <!--- under the License. -->
 
 <h1 id="designing-efficient-data-loaders-for-deep-learning">Designing Efficient Data Loaders for Deep Learning</h1>
@@ -334,23 +458,24 @@
 <p>We organize this design note as follows:</p>
 
 <ul>
-<li><strong>IO Design Insight:</strong>  Guiding principles in data loading design.</li>
-<li><strong>Data Format:</strong> Our solution using dmlc-core&#39;s binary recordIO implementation.</li>
-<li><strong>Data Loading:</strong> Our method to reduce IO cost by utilizing the threaded iterator provided by dmlc-core.</li>
-<li><strong>Interface Design:</strong> Our approach to facilitate writing MXNet data iterators in just a few lines of Python.</li>
-<li><strong>Future Extension:</strong> Prospective ideas for making data loading more flexible.</li>
+  <li><strong>IO Design Insight:</strong>  Guiding principles in data loading design.</li>
+  <li><strong>Data Format:</strong> Our solution using dmlc-core’s binary recordIO implementation.</li>
+  <li><strong>Data Loading:</strong> Our method to reduce IO cost by utilizing the threaded iterator provided by dmlc-core.</li>
+  <li><strong>Interface Design:</strong> Our approach to facilitate writing MXNet data iterators in just a few lines of Python.</li>
+  <li><strong>Future Extension:</strong> Prospective ideas for making data loading more flexible.</li>
 </ul>
 
 <p>Our analysis will motivate several requirements that an effective IO system should fulfill.</p>
 
-<p><strong><em>List of Key Requirements</em></strong>
-- Small file size.
-- Parallel (distributed) packing of data.
-- Fast data loading and online augmentation.
-- Quick reads from arbitrary parts of the dataset in the distributed setting.</p>
+<p><strong><em>List of Key Requirements</em></strong></p>
+<ul>
+  <li>Small file size.</li>
+  <li>Parallel (distributed) packing of data.</li>
+  <li>Fast data loading and online augmentation.</li>
+  <li>Quick reads from arbitrary parts of the dataset in the distributed setting.</li>
+</ul>
 
 <h2 id="design-insight">Design Insight</h2>
-
 <p>To design an IO system, we must address two kinds of tasks:
 data preparation and data loading.
 Data preparation is usually performed offline,
@@ -358,28 +483,28 @@
 In this section, we will introduce our insight of IO design involving the two phases.</p>
 
 <h3 id="data-preparation">Data Preparation</h3>
-
 <p>Data preparation describes the process of packing data
 into a desired format for later processing.
 When working with large datasets like ImageNet, this process can be time-consuming.
 In these cases, there are several heuristics we ought to follow:</p>
 
 <ul>
-<li>Pack the dataset into small numbers of files. A dataset may contain millions of data instances. Packed data distributes easily from machine to machine.</li>
-<li>Do the packing once. We don&#39;t want to repack data every time run-time settings, like the number of machines, are changed.</li>
-<li>Process the packing in parallel to save time.</li>
-<li>Be able to access arbitrary parts of the data easily. This is crucial for distributed machine learning when data parallelism is introduced. Things may get tricky when the data has been packed into several physical data files. The desired behavior could be: the packed data can be logically separated into arbitrary numbers of partitions, no matter how many physical data files there are. For example, if we pack 1000 images into 4 physical files, then each file contains 250 images. If we then use 10 machines to train a DNN, we should be able to load approximately 100 images per machine. Some machines may need images from different physical files.</li>
+  <li>Pack the dataset into small numbers of files. A dataset may contain millions of data instances. Packed data distributes easily from machine to machine.</li>
+  <li>Do the packing once. We don’t want to repack data every time run-time settings, like the number of machines, are changed.</li>
+  <li>Process the packing in parallel to save time.</li>
+  <li>Be able to access arbitrary parts of the data easily. This is crucial for distributed machine learning when data parallelism is introduced. Things may get tricky when the data has been packed into several physical data files. The desired behavior could be: the packed data can be logically separated into arbitrary numbers of partitions, no matter how many physical data files there are. For example, if we pack 1000 images into 4 physical files, then each file contains 250 images. If we then use 10 machines to train a DNN, we should be able to load approximately 100 images per machine. Some machines may need images from different physical files.</li>
 </ul>
 
 <h3 id="data-loading">Data Loading</h3>
-
 <p>The next step to consider is how to load the packed data into RAM.
 Our goal is to load the data as quickly as possible.
-There are several heuristics we try to follow:
-- <strong>Read continuously:</strong> We can read faster when reading from contiguous locations on disk.
-- <strong>Reduce the bytes to be loaded:</strong> We can achieve this by storing data in a compact way, e.g. saving images in JPEG format.
-- <strong>Load and train in different threads:</strong> This avoids computational bottlenecks while loading data.
-- <strong>Save RAM:</strong> Judiciously decide whether to load entire files into RAM.</p>
+There are several heuristics we try to follow:</p>
+<ul>
+  <li><strong>Read continuously:</strong> We can read faster when reading from contiguous locations on disk.</li>
+  <li><strong>Reduce the bytes to be loaded:</strong> We can achieve this by storing data in a compact way, e.g. saving images in JPEG format.</li>
+  <li><strong>Load and train in different threads:</strong> This avoids computational bottlenecks while loading data.</li>
+  <li><strong>Save RAM:</strong> Judiciously decide whether to load entire files into RAM.</li>
+</ul>
 
 <h2 id="data-format">Data Format</h2>
 
@@ -390,15 +515,17 @@
 
 <h3 id="binary-record">Binary Record</h3>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/baserecordio.jpg" alt="baserecordio">
-In MXNet&#39;s binary RecordIO, we store each data instance as a record.
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/baserecordio.jpg" alt="baserecordio" />
+In MXNet’s binary RecordIO, we store each data instance as a record.
 <strong>kMagic</strong> is a <em>magic number</em> indicating the start of a record.
 <strong>Lrecord</strong> encodes length and a continue flag.
-In lrecord,
-- cflag == 0: this is a complete record
-- cflag == 1: start of a multiple-records
-- cflag == 2: middle of multiple-records
-- cflag == 3: end of multiple-records</p>
+In lrecord,</p>
+<ul>
+  <li>cflag == 0: this is a complete record</li>
+  <li>cflag == 1: start of a multiple-records</li>
+  <li>cflag == 2: middle of multiple-records</li>
+  <li>cflag == 3: end of multiple-records</li>
+</ul>
 
 <p><strong>Data</strong> is the space to save data content.
 <strong>Pad</strong> is simply a padding space to make record align to 4 bytes.</p>
@@ -423,14 +550,14 @@
 they only occupy about <strong>35G</strong> of disk space.
 This significantly reduces the cost owing to reading from disk.</p>
 
-<p>Here&#39;s an example of binary recordIO:
-<img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/ImageRecordIO.jpg" alt="baserecordio">
+<p>Here’s an example of binary recordIO:
+<img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/ImageRecordIO.jpg" alt="baserecordio" />
 We first resize the image into 256 * 256,
 then compress into JPEG format.
 After that, we save a header that indicates the index and label
 for that image to be used when constructing the <em>Data</em> field for that record.
 We then pack several images together into a file.
-You may want to also review the <a href="https://mxnet.incubator.apache.org/tutorials/basic/data.html#loading-data-using-image-iterators">example using im2rec.py to create a RecordIO dataset</a>.</p>
+You may want to also review the <a href="https://mxnet.apache.org/api/faq/recordio">example using im2rec.py to create a RecordIO dataset</a>.</p>
 
 <h3 id="access-arbitrary-parts-of-data">Access Arbitrary Parts Of Data</h3>
 
@@ -442,28 +569,32 @@
 we can achieve the above goal using the InputSplit
 functionality provided by dmlc-core.</p>
 
-<p>InputSplit takes the following parameters:
-- FileSystem <em>filesys</em>: dmlc-core wrapper around the IO operations for different file systems, like hdfs, s3, local. User shouldn&#39;t need to worry about the difference between file systems anymore.
-- Char <em>uri</em>: The URI of files. Note that it could be a list of files because we may pack the data into several physical parts. File URIs are separated by &#39;;&#39;.
-- Unsigned <em>nsplit</em>: The number of logical splits. <em>nsplit</em> could be different from the number of physical files.
-- Unsigned <em>rank</em>: Which split to load in this process.</p>
-
-<p>The splitting process is demonstrated below:
-- Determine the size of each partition.</p>
-
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/beforepartition.jpg" alt="beforepartition"></p>
-
+<p>InputSplit takes the following parameters:</p>
 <ul>
-<li>Approximately partition the records according to file size. Note that the boundary of each part may be located in the middle of a record.</li>
+  <li>FileSystem <em>filesys</em>: dmlc-core wrapper around the IO operations for different file systems, like hdfs, s3, local. User shouldn’t need to worry about the difference between file systems anymore.</li>
+  <li>Char <em>uri</em>: The URI of files. Note that it could be a list of files because we may pack the data into several physical parts. File URIs are separated by ‘;’.</li>
+  <li>Unsigned <em>nsplit</em>: The number of logical splits. <em>nsplit</em> could be different from the number of physical files.</li>
+  <li>Unsigned <em>rank</em>: Which split to load in this process.</li>
 </ul>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/approximatepartition.jpg" alt="approxipartition"></p>
-
+<p>The splitting process is demonstrated below:</p>
 <ul>
-<li> Set the beginning of partitions in such a way as to avoid splitting records across partitions.</li>
+  <li>Determine the size of each partition.</li>
 </ul>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/afterpartition.jpg" alt="afterpartition"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/beforepartition.jpg" alt="beforepartition" /></p>
+
+<ul>
+  <li>Approximately partition the records according to file size. Note that the boundary of each part may be located in the middle of a record.</li>
+</ul>
+
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/approximatepartition.jpg" alt="approxipartition" /></p>
+
+<ul>
+  <li>Set the beginning of partitions in such a way as to avoid splitting records across partitions.</li>
+</ul>
+
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/afterpartition.jpg" alt="afterpartition" /></p>
 
 <p>By conducting the above operations,
 we now identify the records belong to each part,
@@ -473,14 +604,14 @@
 
 <p>Since our partitioning scheme does not depend on the number of physical data files,
 we can process a huge dataset like ImageNet_22K in parallel fashion as illustrated below.
-We don&#39;t need to consider distributed loading issue at the preparation time,
+We don’t need to consider distributed loading issue at the preparation time,
 just select the most efficient physical file number
 according to the dataset size and computing resources available.
-<img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/parallelprepare.jpg" alt="parallelprepare"></p>
+<img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/parallelprepare.jpg" alt="parallelprepare" /></p>
 
 <h2 id="data-loading-and-preprocessing">Data Loading and Preprocessing</h2>
 
-<p>When the speed of loading and preprocessing can&#39;t keep up
+<p>When the speed of loading and preprocessing can’t keep up
 with the speed of training or evaluation,
 IO can bottleneck the speed of the whole system.
 In this section, we will introduce a few tricks
@@ -493,12 +624,14 @@
 
 <p>When training deep neural networks,
 we sometimes must load and preprocess the data
-while simultaneously training for the following reasons:
-- When the whole size of the dataset exceeds available RAM size, we can&#39;t load it in advance;
-- Sometimes, to make models robust to things like translations, rotations, and small amounts of color shift of noise, we introduce randomness into the training process. In these cases we must re-preprocess the data each time we revisit an example.</p>
+while simultaneously training for the following reasons:</p>
+<ul>
+  <li>When the whole size of the dataset exceeds available RAM size, we can’t load it in advance;</li>
+  <li>Sometimes, to make models robust to things like translations, rotations, and small amounts of color shift of noise, we introduce randomness into the training process. In these cases we must re-preprocess the data each time we revisit an example.</li>
+</ul>
 
 <p>In service of efficiency, we also address multi-threading techniques. Taking Imagenet training as an example, after loading a bunch of image records, we can start multiple threads to simultaneously perform image decoding and image augmentation. We depict this process in the following illustration:
-<img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/process.jpg" alt="process"></p>
+<img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/process.jpg" alt="process" /></p>
 
 <h3 id="hide-io-cost-using-threadediter">Hide IO Cost Using Threadediter</h3>
 
@@ -511,20 +644,20 @@
 while the main thread acts as a data consumer as illustrated below.</p>
 
 <p>The threadediter maintains a buffer of a certain size
-and automatically fills the buffer when it&#39;s not full.
+and automatically fills the buffer when it’s not full.
 And after the consumer finishes consuming part of the data in the buffer,
 threadediter will reuse the space to save the next part of data.
-<img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/threadediter.png" alt="threadediter"></p>
+<img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/io/threadediter.png" alt="threadediter" /></p>
 
 <h2 id="mxnet-io-python-interface">MXNet IO Python Interface</h2>
-
 <p>We make the IO object as an iterator in numpy.
 By achieving that, the user can easily access the data
 using a for-loop or calling next() function.
 Defining a data iterator is very similar to defining a symbolic operator in MXNet.</p>
 
 <p>The following example code demonstrates a Cifar data iterator.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python"><span class="n">dataiter</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">io</span><span class="o">.</span><span class="n">ImageRecordIter</span><span class="p">(</span>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code><span class="n">dataiter</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">io</span><span class="o">.</span><span class="n">ImageRecordIter</span><span class="p">(</span>
     <span class="c1"># Dataset Parameter, indicating the data file, please check the data is already there
 </span>    <span class="n">path_imgrec</span><span class="o">=</span><span class="s">"data/cifar/train.rec"</span><span class="p">,</span>
     <span class="c1"># Dataset Parameter, indicating the image size after preprocessing
@@ -547,26 +680,27 @@
 </span>    <span class="n">ctx</span><span class="o">=</span><span class="s">"gpu"</span><span class="p">,</span>
     <span class="c1"># The out data type, could be 'float32' 'int8' or 'uint8'
 </span>    <span class="n">dtype</span><span class="o">=</span><span class="s">"float32"</span><span class="p">)</span>
-</code></pre></div>
+</code></pre></div></div>
+
 <p>Generally, to create a data iterator, you need to provide five kinds of parameters:</p>
 
 <ul>
-<li><strong>Dataset Param:</strong> Information needed to access the dataset, e.g. file path, input shape.</li>
-<li><strong>Batch Param:</strong> Specifies how to form a batch, e.g. batch size.</li>
-<li><strong>Augmentation Param:</strong> Which augmentation operations (e.g. crop, mirror) should be taken on an input image.</li>
-<li><strong>Backend Param:</strong> Controls the behavior of the backend threads to hide data loading cost.</li>
-<li><strong>Auxiliary Param:</strong> Provides options to help with debugging.</li>
+  <li><strong>Dataset Param:</strong> Information needed to access the dataset, e.g. file path, input shape.</li>
+  <li><strong>Batch Param:</strong> Specifies how to form a batch, e.g. batch size.</li>
+  <li><strong>Augmentation Param:</strong> Which augmentation operations (e.g. crop, mirror) should be taken on an input image.</li>
+  <li><strong>Backend Param:</strong> Controls the behavior of the backend threads to hide data loading cost.</li>
+  <li><strong>Auxiliary Param:</strong> Provides options to help with debugging.</li>
 </ul>
 
 <p>Usually, <strong>Dataset Param</strong> and <strong>Batch Param</strong> MUST be given,
-otherwise the data batch can&#39;t be created.
+otherwise the data batch can’t be created.
 Other parameters can be given as needed.
 Ideally, we should separate the MX Data IO into modules,
 some of which might be useful to expose to users, for example:</p>
 
 <ul>
-<li><strong>Efficient prefetcher:</strong> allows the user to write a data loader that reads their customized binary format that automatically gets multi-threaded prefetcher support.</li>
-<li><strong>Data transformer:</strong> image random cropping, mirroring, etc. Allows the users to use those tools, or plug in their own customized transformers (maybe they want to add some specific kind of coherent random noise to data, etc.)</li>
+  <li><strong>Efficient prefetcher:</strong> allows the user to write a data loader that reads their customized binary format that automatically gets multi-threaded prefetcher support.</li>
+  <li><strong>Data transformer:</strong> image random cropping, mirroring, etc. Allows the users to use those tools, or plug in their own customized transformers (maybe they want to add some specific kind of coherent random noise to data, etc.)</li>
 </ul>
 
 <h2 id="future-extensions">Future Extensions</h2>
@@ -591,8 +725,7 @@
             <div class="col-4">
                 <h4 class="footer-category-title">Resources</h4>
                 <ul class="contact-list">
-                    <li><a class="u-email" href="mailto:dev@mxnet.apache.org">Dev list</a></li>
-                    <li><a class="u-email" href="mailto:user@mxnet.apache.org">User mailing list</a></li>
+                    <li><a href="/community/contribute#mxnet-dev-communications">Mailing lists</a></li>
                     <li><a href="https://cwiki.apache.org/confluence/display/MXNET/Apache+MXNet+Home">Developer Wiki</a></li>
                     <li><a href="https://issues.apache.org/jira/projects/MXNET/issues">Jira Tracker</a></li>
                     <li><a href="https://github.com/apache/incubator-mxnet/labels/Roadmap">Github Roadmap</a></li>
diff --git a/api/architecture/note_engine.html b/api/architecture/note_engine.html
index b506a5b..a75108a 100644
--- a/api/architecture/note_engine.html
+++ b/api/architecture/note_engine.html
@@ -5,23 +5,23 @@
   <meta name="viewport" content="width=device-width, initial-scale=1">
   <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"><!-- Begin Jekyll SEO tag v2.6.1 -->
 <title>Dependency Engine | Apache MXNet</title>
-<meta name="generator" content="Jekyll v3.8.6" />
+<meta name="generator" content="Jekyll v4.0.0" />
 <meta property="og:title" content="Dependency Engine" />
 <meta property="og:locale" content="en_US" />
 <meta name="description" content="A flexible and efficient library for deep learning." />
 <meta property="og:description" content="A flexible and efficient library for deep learning." />
-<link rel="canonical" href="https://mxnet.incubator.apache.org/api/architecture/note_engine" />
-<meta property="og:url" content="https://mxnet.incubator.apache.org/api/architecture/note_engine" />
+<link rel="canonical" href="https://mxnet-beta.staged.apache.org//api/architecture/note_engine" />
+<meta property="og:url" content="https://mxnet-beta.staged.apache.org//api/architecture/note_engine" />
 <meta property="og:site_name" content="Apache MXNet" />
 <script type="application/ld+json">
-{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Dependency Engine","url":"https://mxnet.incubator.apache.org/api/architecture/note_engine","@context":"https://schema.org"}</script>
+{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Dependency Engine","url":"https://mxnet-beta.staged.apache.org//api/architecture/note_engine","@context":"https://schema.org"}</script>
 <!-- End Jekyll SEO tag -->
 <script src="https://medium-widget.pixelpoint.io/widget.js"></script>
-  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet.incubator.apache.org/feed.xml" title="Apache MXNet" /><script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
-
-  <script src="/assets/js/clipboard.js"></script>
-  <script src="/assets/js/copycode.js"></script>
-</head>
+  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.css" />
+  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet-beta.staged.apache.org//feed.xml" title="Apache MXNet" /><script src="/assets/js/jquery-3.3.1.min.js"></script><script src="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.js" defer></script>
+  <script src="/assets/js/globalSearch.js" defer></script>
+  <script src="/assets/js/clipboard.js" defer></script>
+  <script src="/assets/js/copycode.js" defer></script></head>
 <body><header class="site-header" role="banner">
 
   <script>
@@ -59,14 +59,173 @@
             </svg>
           </span>
       </label>
-
+      <div class="gs-search-border">
+        <div id="gs-search-icon"></div>
+        <form id="global-search-form">
+          <input id="global-search" type="text" title="Search" placeholder="Search" />
+          <div id="global-search-dropdown-container">
+            <button class="gs-current-version btn" type="button" data-toggle="dropdown">
+                <span id="gs-current-version-label">master</span>
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+          <span id="global-search-close">x</span>
+        </form>
+      </div>
       <div class="trigger">
+        <div id="global-search-mobile-border">
+          <div id="gs-search-icon-mobile"></div>
+          <input id="global-search-mobile" placeholder="Search..." type="text"/>
+          <div id="global-search-dropdown-container-mobile">
+            <button class="gs-current-version-mobile btn" type="button" data-toggle="dropdown">
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown-mobile">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+        </div>
         <a class="page-link" href="/get_started">Get Started</a>
         <a class="page-link" href="/blog">Blog</a>
         <a class="page-link" href="/features">Features</a>
         <a class="page-link" href="/ecosystem">Ecosystem</a>
         <a class="page-link" href="/api">Docs & Tutorials</a>
         <a class="page-link" href="https://github.com/apache/incubator-mxnet">GitHub</a>
+        <div class="dropdown">
+          <span class="dropdown-header">master
+            <svg class="dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
+          </span>
+          <div class="dropdown-content">
+            
+              
+                <a class="dropdown-option-active" href="/">master</a>
+              
+            
+              
+                <a href="/versions/1.6/">1.6</a>
+              
+            
+              
+                <a href="/versions/1.5.0/">1.5.0</a>
+              
+            
+              
+                <a href="/versions/1.4.1/">1.4.1</a>
+              
+            
+              
+                <a href="/versions/1.3.1/">1.3.1</a>
+              
+            
+              
+                <a href="/versions/1.2.1/">1.2.1</a>
+              
+            
+              
+                <a href="/versions/1.1.0/">1.1.0</a>
+              
+            
+              
+                <a href="/versions/1.0.0/">1.0.0</a>
+              
+            
+              
+                <a href="/versions/0.12.1/">0.12.1</a>
+              
+            
+              
+                <a href="/versions/0.11.0/">0.11.0</a>
+              
+            
+          </div>
+        </div>
       </div>
     </nav>
   </div>
@@ -128,8 +287,6 @@
             
               <!-- page-category -->
             
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/exception_handling">Exception Handling in MXNet</a></li>
               <!-- page-category -->
@@ -210,22 +367,6 @@
             
               <!-- page-category -->
             
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/note_data_loading">Efficient Data Loaders</a></li>
               <!-- page-category -->
@@ -238,14 +379,14 @@
             <li><a href="/api/architecture/note_memory">Memory Consumption</a></li>
               <!-- page-category -->
             
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/overview">MXNet System Architecture</a></li>
               <!-- page-category -->
             
               <!-- page-category -->
             
+              <!-- page-category -->
+            
             
             <li><a href="/api/architecture/program_model">Deep Learning Programming Paradigm</a></li>
               <!-- page-category -->
@@ -281,42 +422,25 @@
               <!-- page-category -->
             
               <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
                <!-- resource-p -->
         </ul>
     </div>
     <div class="col-9">
         <!--- Licensed to the Apache Software Foundation (ASF) under one -->
-
 <!--- or more contributor license agreements.  See the NOTICE file -->
-
 <!--- distributed with this work for additional information -->
-
 <!--- regarding copyright ownership.  The ASF licenses this file -->
-
 <!--- to you under the Apache License, Version 2.0 (the -->
-
 <!--- "License"); you may not use this file except in compliance -->
-
 <!--- with the License.  You may obtain a copy of the License at -->
 
 <!---   http://www.apache.org/licenses/LICENSE-2.0 -->
 
 <!--- Unless required by applicable law or agreed to in writing, -->
-
 <!--- software distributed under the License is distributed on an -->
-
 <!--- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -->
-
 <!--- KIND, either express or implied.  See the License for the -->
-
 <!--- specific language governing permissions and limitations -->
-
 <!--- under the License. -->
 
 <h1 id="dependency-engine-for-deep-learning">Dependency Engine for Deep Learning</h1>
@@ -354,25 +478,27 @@
 and build a library to automatically parallelize our programs
 in an asynchronous way?</p>
 
-<p>For example, in the following code, we can run <code>B = A + 1</code>
-and <code>C = A + 2</code> in any order, or in parallel:</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="n">A</span> <span class="o">=</span> <span class="mi">2</span>
+<p>For example, in the following code, we can run <code class="highlighter-rouge">B = A + 1</code>
+and <code class="highlighter-rouge">C = A + 2</code> in any order, or in parallel:</p>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="n">A</span> <span class="o">=</span> <span class="mi">2</span>
     <span class="n">B</span> <span class="o">=</span> <span class="n">A</span> <span class="o">+</span> <span class="mi">1</span>
     <span class="n">C</span> <span class="o">=</span> <span class="n">A</span> <span class="o">+</span> <span class="mi">2</span>
     <span class="n">D</span> <span class="o">=</span> <span class="n">B</span> <span class="o">*</span> <span class="n">C</span>
-</code></pre></div>
-<p>However, it&#39;s quite hard to code the sequence manually
-because the last operation, <code>D = B * C</code>, needs to wait
+</code></pre></div></div>
+
+<p>However, it’s quite hard to code the sequence manually
+because the last operation, <code class="highlighter-rouge">D = B * C</code>, needs to wait
 for both of the preceding operations to complete before it starts.
 The following dependency graph/data flow graph illustrates this.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_simple.png" alt="Dep Simple"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_simple.png" alt="Dep Simple" /></p>
 
 <p>A dependency engine is a library that takes a sequence of operations
 and schedules them according to the dependency pattern,  potentially in parallel.
 So in this example, a dependency library
-could run <code>B = A + 1</code> and <code>C = A + 2</code> in parallel,
-and run <code>D = B * C</code> after those operations complete.</p>
+could run <code class="highlighter-rouge">B = A + 1</code> and <code class="highlighter-rouge">C = A + 2</code> in parallel,
+and run <code class="highlighter-rouge">D = B * C</code> after those operations complete.</p>
 
 <h2 id="problems-in-dependency-scheduling">Problems in Dependency Scheduling</h2>
 
@@ -382,38 +508,35 @@
 In this section, we discuss those problems.</p>
 
 <h3 id="data-flow-dependency">Data Flow Dependency</h3>
-
 <p>Data flow dependency describes how the outcome of one computation
 can be used in other computations.
 Every dependency engine has to solve the data flow dependency problem.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_simple.png" alt="Dep Simple"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_simple.png" alt="Dep Simple" /></p>
 
 <p>Because we discussed this issue in the preceding section,
 we include the same figure here. Libraries that have
 data flow tracking engines include Minerva and Purine2.</p>
 
 <h3 id="memory-recycling">Memory Recycling</h3>
-
 <p>When should we recycle the memory that we allocated to the arrays?
 In serial processing, this is easy to determine.
 We simply recycle the memory after the variable goes out of scope.
 However, as the following figure shows, this is a bit harder in parallel processing.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_del.png" alt="Dep Del"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_del.png" alt="Dep Del" /></p>
 
 <p>In this example, because both computations need to use values from A,
-we can&#39;t recycle the memory until both complete.
+we can’t recycle the memory until both complete.
 The engine must schedule the memory recycling operations according to the dependencies,
-and ensure that they are executed after both <code>B = A + 1</code> and <code>C = A + 2</code> complete.</p>
+and ensure that they are executed after both <code class="highlighter-rouge">B = A + 1</code> and <code class="highlighter-rouge">C = A + 2</code> complete.</p>
 
 <h3 id="random-number-generation">Random Number Generation</h3>
-
 <p>Random number generators, which are commonly used in machine learning,
 pose interesting challenges for dependency engines.
 Consider the following example:</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_rand.png" alt="Dep Rand"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_rand.png" alt="Dep Rand" /></p>
 
 <p>In this example, we are generating random numbers in a sequence.
 Although it seems that the two random number generations can be parallelized,
@@ -429,10 +552,11 @@
 <p>In the last section, we discussed the problems
 we might face in designing a dependency engine.
 Before thinking about how to design a generic engine to solve those problems,
-let&#39;s consider how a dependency engine can help in multi-GPU training of a neural network.
+let’s consider how a dependency engine can help in multi-GPU training of a neural network.
 The following pseudocode Python program illustrates
 training one batch on a  two-layer neural network.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="c1"># Example of one iteration Two GPU neural Net
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="c1"># Example of one iteration Two GPU neural Net
 </span>    <span class="n">data</span> <span class="o">=</span> <span class="n">next_batch</span><span class="p">()</span>
     <span class="n">data</span><span class="p">[</span><span class="n">gpu0</span><span class="p">]</span><span class="o">.</span><span class="n">copyfrom</span><span class="p">(</span><span class="n">data</span><span class="p">[</span><span class="mi">0</span><span class="p">:</span><span class="mi">50</span><span class="p">])</span>
     <span class="n">data</span><span class="p">[</span><span class="n">gpu1</span><span class="p">]</span><span class="o">.</span><span class="n">copyfrom</span><span class="p">(</span><span class="n">data</span><span class="p">[</span><span class="mi">50</span><span class="p">:</span><span class="mi">100</span><span class="p">])</span>
@@ -457,7 +581,7 @@
     <span class="n">fc2_weight</span><span class="p">[</span><span class="n">cpu</span><span class="p">]</span> <span class="o">-=</span> <span class="n">lr</span> <span class="o">*</span>  <span class="n">fc2_wgrad</span><span class="p">[</span><span class="n">cpu</span><span class="p">]</span>
     <span class="n">fc1_weight</span><span class="p">[</span><span class="n">cpu</span><span class="p">]</span><span class="o">.</span><span class="n">copyto</span><span class="p">(</span><span class="n">fc1_weight</span><span class="p">[</span><span class="n">gpu0</span><span class="p">]</span> <span class="p">,</span> <span class="n">fc1_weight</span><span class="p">[</span><span class="n">gpu1</span><span class="p">])</span>
     <span class="n">fc2_weight</span><span class="p">[</span><span class="n">cpu</span><span class="p">]</span><span class="o">.</span><span class="n">copyto</span><span class="p">(</span><span class="n">fc2_weight</span><span class="p">[</span><span class="n">gpu0</span><span class="p">]</span> <span class="p">,</span> <span class="n">fc2_weight</span><span class="p">[</span><span class="n">gpu1</span><span class="p">])</span>
-</code></pre></div>
+</code></pre></div></div>
 <p>In this program, the data 0 to 50  is copied to GPU 0,
 and the data 50 to 100 is copied to GPU 1.
 The calculated gradients are aggregated in the CPU,
@@ -466,21 +590,21 @@
 This is a common way to write a parallel program in a serial manner.
 The following dependency graph shows how it can be parallelized:</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_net.png" alt="Dep Net"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_net.png" alt="Dep Net" /></p>
 
 <p><strong><em>Notes:</em></strong></p>
 
 <ul>
-<li>The gradient can be copied to the CPU as soon as we get the gradient of a layer.</li>
-<li>The weight can be copied back soon as the weight is updated.</li>
-<li>In the forward pass, we have a dependency on <code>fc1_weight[cpu].copyto(fc1_weight[gpu0] , fc1_weight[gpu1])</code>
+  <li>The gradient can be copied to the CPU as soon as we get the gradient of a layer.</li>
+  <li>The weight can be copied back soon as the weight is updated.</li>
+  <li>In the forward pass, we have a dependency on <code class="highlighter-rouge">fc1_weight[cpu].copyto(fc1_weight[gpu0] , fc1_weight[gpu1])</code>
 from the previous iteration.</li>
-<li>There is a delay in computation between the last backward pass to layer k and the next forward call to layer k. We can synchronize the weight of layer k <em>in parallel</em> with other computation during this delay.</li>
+  <li>There is a delay in computation between the last backward pass to layer k and the next forward call to layer k. We can synchronize the weight of layer k <em>in parallel</em> with other computation during this delay.</li>
 </ul>
 
 <p>This approach to optimization is used by multi-GPU deep learning libraries, such as CXXNet.
 The point is to overlap weight synchronization (communication) with computation.
-However, it&#39;s not easy to do that, because the copy operation needs to be triggered
+However, it’s not easy to do that, because the copy operation needs to be triggered
 as soon as the backward pass of the layer completes,
 which then triggers the reduction, updates, etc.</p>
 
@@ -489,114 +613,117 @@
 
 <h2 id="designing-a-generic-dependency-engine">Designing a Generic Dependency Engine</h2>
 
-<p>We hope that you&#39;re convinced that a dependency engine is useful
+<p>We hope that you’re convinced that a dependency engine is useful
 for scaling deep learning programs to multiple devices.
-Now let&#39;s discuss how we can design and implement
+Now let’s discuss how we can design and implement
 a generic interface for a dependency engine.
-This solution isn&#39;t the only possible design for a dependency engine.
-It&#39;s an example that we think is useful in most cases.</p>
+This solution isn’t the only possible design for a dependency engine.
+It’s an example that we think is useful in most cases.</p>
 
 <p>Our goal is to create a dependency engine that is <em>generic</em> and <em>lightweight</em>.
-Ideally, we&#39;d like the engine that easily plugs into existing deep learning code,
+Ideally, we’d like the engine that easily plugs into existing deep learning code,
 and that can scale up to multiple machines with minor modifications.
 To do that, we need to focus only on dependency tracking,
-not on assumptions about what users can or can&#39;t do.</p>
+not on assumptions about what users can or can’t do.</p>
 
-<p>Here&#39;s a summary of goals for the engine:</p>
+<p>Here’s a summary of goals for the engine:</p>
 
 <ul>
-<li>The engine should not be aware of what operations it performs, so that users can perform any operations they define.</li>
-<li>It should not be restricted in what type of objects it can schedule.
-
-<ul>
-<li>We should be able to schedule dependencies on GPU and CPU memory.</li>
-<li>We should be able to track dependencies on the random number generator, etc.</li>
-</ul></li>
-<li>The engine should not allocate resources. It should only track dependencies. Users can allocate their own memory, PRNG, etc.</li>
+  <li>The engine should not be aware of what operations it performs, so that users can perform any operations they define.</li>
+  <li>It should not be restricted in what type of objects it can schedule.
+    <ul>
+      <li>We should be able to schedule dependencies on GPU and CPU memory.</li>
+      <li>We should be able to track dependencies on the random number generator, etc.</li>
+    </ul>
+  </li>
+  <li>The engine should not allocate resources. It should only track dependencies. Users can allocate their own memory, PRNG, etc.</li>
 </ul>
 
 <p>The following Python snippet provides an engine interface that might help us reach our goal. Note that a real implementation will be closer to the metal, typically in C++.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="k">class</span> <span class="nc">DepEngine</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
-        <span class="k">def</span> <span class="nf">new_variable</span><span class="p">():</span>
-            <span class="s">"""Return a new variable tag
-            Returns
-            -------
-            vtag : Variable Tag
-                The token of the engine to represent dependencies.
-            """</span>
-            <span class="k">pass</span>
 
-        <span class="k">def</span> <span class="nf">push</span><span class="p">(</span><span class="n">exec_func</span><span class="p">,</span> <span class="n">read_vars</span><span class="p">,</span> <span class="n">mutate_vars</span><span class="p">):</span>
-            <span class="s">"""Push the operation to the engine.
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">class</span> <span class="nc">DepEngine</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+	    <span class="k">def</span> <span class="nf">new_variable</span><span class="p">():</span>
+		    <span class="s">"""Return a new variable tag
+		    Returns
+		    -------
+		    vtag : Variable Tag
+		        The token of the engine to represent dependencies.
+		    """</span>
+		    <span class="k">pass</span>
 
-            Parameters
-            ----------
-            exec_func : callable
-                The real operation to be performed.
+	    <span class="k">def</span> <span class="nf">push</span><span class="p">(</span><span class="n">exec_func</span><span class="p">,</span> <span class="n">read_vars</span><span class="p">,</span> <span class="n">mutate_vars</span><span class="p">):</span>
+		    <span class="s">"""Push the operation to the engine.
 
-            read_vars : list of Variable Tags
-                The list of variables this operation will read from.
+		    Parameters
+		    ----------
+		    exec_func : callable
+			    The real operation to be performed.
 
-            mutate_vars : list of Variable Tags
-                The list of variables this operation will mutate.
-            """</span>
-            <span class="k">pass</span>
-</code></pre></div>
-<p>Because we can&#39;t make assumptions about what objects we are scheduling, we ask the user to allocate a
+		    read_vars : list of Variable Tags
+			    The list of variables this operation will read from.
+
+		    mutate_vars : list of Variable Tags
+			    The list of variables this operation will mutate.
+		    """</span>
+		    <span class="k">pass</span>
+</code></pre></div></div>
+
+<p>Because we can’t make assumptions about what objects we are scheduling, we ask the user to allocate a
 <em>virtual tag</em> that is associated with each object to represent what we need to schedule.
 So, at the beginning, the user can allocate the variable tag,
 and attach it to each of the objects that we want to schedule.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/tag_var.png" alt="Dep Net"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/tag_var.png" alt="Dep Net" /></p>
 
-<p>The user then calls <code>push</code> to tell the engine about the function to execute.
+<p>The user then calls <code class="highlighter-rouge">push</code> to tell the engine about the function to execute.
 The user also needs to specify the dependencies of the operation,
-using <code>read_vars</code> and <code>write_vars</code>:</p>
+using <code class="highlighter-rouge">read_vars</code> and <code class="highlighter-rouge">write_vars</code>:</p>
 
 <ul>
-<li><code>read_vars</code> are variable tags for objects that the operation will <em>read from</em>, without changing their internal state.</li>
-<li><code>mutate_vars</code> are variable tags for objects whose internal states the operation will mutate.</li>
+  <li><code class="highlighter-rouge">read_vars</code> are variable tags for objects that the operation will <em>read from</em>, without changing their internal state.</li>
+  <li><code class="highlighter-rouge">mutate_vars</code> are variable tags for objects whose internal states the operation will mutate.</li>
 </ul>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/push_var.png" alt="Push Op"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/push_var.png" alt="Push Op" /></p>
 
-<p>The preceding figure shows how to push operation <code>B = A + 1</code> to the dependency engine. <code>B.data</code> and
-<code>A.data</code> are the allocated space. Note that the engine is <em>only aware of variable tags</em>.
+<p>The preceding figure shows how to push operation <code class="highlighter-rouge">B = A + 1</code> to the dependency engine. <code class="highlighter-rouge">B.data</code> and
+<code class="highlighter-rouge">A.data</code> are the allocated space. Note that the engine is <em>only aware of variable tags</em>.
 Any execution function can be processed.
 This interface is generic for the operations and resources we want to schedule.</p>
 
-<p>For fun, let&#39;s look at how the engine internals work with the tags by considering the following code snippet:</p>
-<div class="highlight"><pre><code class="language-" data-lang="">    B = A + 1
+<p>For fun, let’s look at how the engine internals work with the tags by considering the following code snippet:</p>
+
+<div class="highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    B = A + 1
     C = A + 2
     A = C * 2
     D = A + 3
-</code></pre></div>
-<p>The first line reads variable <code>A</code> and mutates variable <code>B</code>. The second line reads variable <code>A</code> and mutates variable <code>C</code>. And so on.</p>
+</code></pre></div></div>
+
+<p>The first line reads variable <code class="highlighter-rouge">A</code> and mutates variable <code class="highlighter-rouge">B</code>. The second line reads variable <code class="highlighter-rouge">A</code> and mutates variable <code class="highlighter-rouge">C</code>. And so on.</p>
 
 <p>The engine maintains a queue for each variable, as the following animation shows for each of the four lines. Green blocks represents a read action, while red blocks represent mutations.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_queue.gif" alt="Dependency Queue"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_queue.gif" alt="Dependency Queue" /></p>
 
-<p>Upon building this queue, the engine sees that the first two green blocks at the beginning of <code>A</code>&#39;s queue could actually be run in parallel because they are both read actions and won&#39;t conflict with each other. The following graph illustrates this point.</p>
+<p>Upon building this queue, the engine sees that the first two green blocks at the beginning of <code class="highlighter-rouge">A</code>’s queue could actually be run in parallel because they are both read actions and won’t conflict with each other. The following graph illustrates this point.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_parallel.png" alt="Dependency Parallelism"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/dep_parallel.png" alt="Dependency Parallelism" /></p>
 
-<p>One cool thing about all this scheduling is that it&#39;s not confined to numerical calculations.
+<p>One cool thing about all this scheduling is that it’s not confined to numerical calculations.
 Because everything that is scheduled is only a tag, the engine could schedule everything!</p>
 
 <p>The following figure gives a complete push sequence of the programs we mentioned in previous sections.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/push_seq.png" alt="Push Seq"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/push_seq.png" alt="Push Seq" /></p>
 
 <h3 id="porting-existing-code-to-the-dependency-engine">Porting Existing Code to the Dependency Engine</h3>
-
-<p>Because the generic interface doesn&#39;t control things like memory allocation and which operation to execute,
+<p>Because the generic interface doesn’t control things like memory allocation and which operation to execute,
 most existing code can be scheduled by the dependency engine in two steps:</p>
 
 <ol>
-<li>Allocate the variable tags associated with resources like memory blob, PRNGS.</li>
-<li>Call <code>push</code> with the execution function as the original code to execute, and put the variable tags of
-corresponding resources correctly in <code>read_vars</code> and <code>mutate_vars</code>.</li>
+  <li>Allocate the variable tags associated with resources like memory blob, PRNGS.</li>
+  <li>Call <code class="highlighter-rouge">push</code> with the execution function as the original code to execute, and put the variable tags of
+  corresponding resources correctly in <code class="highlighter-rouge">read_vars</code> and <code class="highlighter-rouge">mutate_vars</code>.</li>
 </ol>
 
 <h2 id="implementing-the-generic-dependency-engine">Implementing the Generic Dependency Engine</h2>
@@ -609,31 +736,30 @@
 <p>The general idea is as follows:</p>
 
 <ul>
-<li>Use a queue to track all of the pending dependencies on each variable tag.</li>
-<li>Use a counter on each operation to track how many dependencies are yet to be fulfilled.</li>
-<li>When operations are completed, update the state of the queue and dependency counters to schedule new operations.</li>
+  <li>Use a queue to track all of the pending dependencies on each variable tag.</li>
+  <li>Use a counter on each operation to track how many dependencies are yet to be fulfilled.</li>
+  <li>When operations are completed, update the state of the queue and dependency counters to schedule new operations.</li>
 </ul>
 
 <p>The following figure illustrates the scheduling algorithm
 and might give you a better sense of what is going on in the engine.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/engine_queue_step.png" alt="Dep Tracking"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/engine_queue_step.png" alt="Dep Tracking" /></p>
 
 <p>Below, we show another example involving random number generators.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/engine_queue_rand.png" alt="Dep Rand"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/engine/engine_queue_rand.png" alt="Dep Rand" /></p>
 
 <p>As you can see, the purpose of the algorithm is to update pending queues
 of operations and to make the right state transition when an operation has completed.
 More care should be taken to make sure the state transitions
-are done in a way that&#39;s safe for threads.</p>
+are done in a way that’s safe for threads.</p>
 
 <h3 id="separate-dependency-tracking-with-running-policy">Separate Dependency Tracking with Running Policy</h3>
-
-<p>If you&#39;re reading carefully, you might have noticed
+<p>If you’re reading carefully, you might have noticed
 that the preceding section shows only the algorithm
 for deciding when an operation can be executed.
-We didn&#39;t show how to actually run an operation.
+We didn’t show how to actually run an operation.
 In practice, there can be many different policies.
 For example, we can either use a global thread-pool to run all operations,
 or use a specific thread to run operations on each device.</p>
@@ -647,13 +773,12 @@
 <h2 id="discussion">Discussion</h2>
 
 <p>The design that we discussed in this article
-isn&#39;t the only solution to the dependency tracking problem.
-It&#39;s just one example of how we might approach this.
+isn’t the only solution to the dependency tracking problem.
+It’s just one example of how we might approach this.
 To be sure, some of these design choices are debatable.
-We&#39;ll discuss some of them in this section.</p>
+We’ll discuss some of them in this section.</p>
 
 <h3 id="dynamic-vs-static">Dynamic vs. Static</h3>
-
 <p>The dependency engine interface discussed in this topic is somewhat dynamic
 in the sense that the user can push operations one by one,
 instead of declaring the entire dependency graph (static).
@@ -665,7 +790,6 @@
 to the interface to enable data structure reuse.</p>
 
 <h3 id="mutation-vs-immutable">Mutation vs. Immutable</h3>
-
 <p>The generic engine interface presented in this page
 supports explicit scheduling for mutation.
 In a typical data flow engine, the data are usually immutable.
@@ -676,15 +800,14 @@
 <p>However, immutability presents several challenges:</p>
 
 <ul>
-<li>It&#39;s harder to schedule resource contention problems, as arise when dealing with random numbers and deletion.</li>
-<li>The engine usually needs to manage resources (memory, random number) to avoid conflicts. It&#39;s harder to plug in user-allocated space, etc.</li>
-<li>Preallocated static memory isn&#39;t available, again because the usual pattern is to write to a preallocated layer space, which is not supported if data is immutable.</li>
+  <li>It’s harder to schedule resource contention problems, as arise when dealing with random numbers and deletion.</li>
+  <li>The engine usually needs to manage resources (memory, random number) to avoid conflicts. It’s harder to plug in user-allocated space, etc.</li>
+  <li>Preallocated static memory isn’t available, again because the usual pattern is to write to a preallocated layer space, which is not supported if data is immutable.</li>
 </ul>
 
 <p>Allowing mutation mitigates these issues.</p>
 
 <h2 id="source-code-of-the-generic-dependency-engine">Source Code of the Generic Dependency Engine</h2>
-
 <p><a href="https://github.com/apache/incubator-mxnet">MXNet</a> provides an implementation
 of the generic dependency engine described in this page.
 We welcome your contributions.</p>
@@ -692,9 +815,8 @@
 <h2 id="next-steps">Next Steps</h2>
 
 <ul>
-<li><a href="note_memory">Squeeze the Memory Consumption of Deep Learning</a></li>
-<li><a href="note_data_loading">Efficient Data Loading Module for Deep Learning</a></li>
-<li><a href="http://mxnet.io/architecture/rnn_interface.html">Survey of RNN Interface</a></li>
+  <li><a href="note_memory">Squeeze the Memory Consumption of Deep Learning</a></li>
+  <li><a href="note_data_loading">Efficient Data Loading Module for Deep Learning</a></li>
 </ul>
 
     </div>
@@ -711,8 +833,7 @@
             <div class="col-4">
                 <h4 class="footer-category-title">Resources</h4>
                 <ul class="contact-list">
-                    <li><a class="u-email" href="mailto:dev@mxnet.apache.org">Dev list</a></li>
-                    <li><a class="u-email" href="mailto:user@mxnet.apache.org">User mailing list</a></li>
+                    <li><a href="/community/contribute#mxnet-dev-communications">Mailing lists</a></li>
                     <li><a href="https://cwiki.apache.org/confluence/display/MXNET/Apache+MXNet+Home">Developer Wiki</a></li>
                     <li><a href="https://issues.apache.org/jira/projects/MXNET/issues">Jira Tracker</a></li>
                     <li><a href="https://github.com/apache/incubator-mxnet/labels/Roadmap">Github Roadmap</a></li>
diff --git a/api/architecture/note_memory.html b/api/architecture/note_memory.html
index eb1848f..c868e3a 100644
--- a/api/architecture/note_memory.html
+++ b/api/architecture/note_memory.html
@@ -5,23 +5,23 @@
   <meta name="viewport" content="width=device-width, initial-scale=1">
   <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"><!-- Begin Jekyll SEO tag v2.6.1 -->
 <title>Memory Consumption | Apache MXNet</title>
-<meta name="generator" content="Jekyll v3.8.6" />
+<meta name="generator" content="Jekyll v4.0.0" />
 <meta property="og:title" content="Memory Consumption" />
 <meta property="og:locale" content="en_US" />
 <meta name="description" content="A flexible and efficient library for deep learning." />
 <meta property="og:description" content="A flexible and efficient library for deep learning." />
-<link rel="canonical" href="https://mxnet.incubator.apache.org/api/architecture/note_memory" />
-<meta property="og:url" content="https://mxnet.incubator.apache.org/api/architecture/note_memory" />
+<link rel="canonical" href="https://mxnet-beta.staged.apache.org//api/architecture/note_memory" />
+<meta property="og:url" content="https://mxnet-beta.staged.apache.org//api/architecture/note_memory" />
 <meta property="og:site_name" content="Apache MXNet" />
 <script type="application/ld+json">
-{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Memory Consumption","url":"https://mxnet.incubator.apache.org/api/architecture/note_memory","@context":"https://schema.org"}</script>
+{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Memory Consumption","url":"https://mxnet-beta.staged.apache.org//api/architecture/note_memory","@context":"https://schema.org"}</script>
 <!-- End Jekyll SEO tag -->
 <script src="https://medium-widget.pixelpoint.io/widget.js"></script>
-  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet.incubator.apache.org/feed.xml" title="Apache MXNet" /><script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
-
-  <script src="/assets/js/clipboard.js"></script>
-  <script src="/assets/js/copycode.js"></script>
-</head>
+  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.css" />
+  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet-beta.staged.apache.org//feed.xml" title="Apache MXNet" /><script src="/assets/js/jquery-3.3.1.min.js"></script><script src="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.js" defer></script>
+  <script src="/assets/js/globalSearch.js" defer></script>
+  <script src="/assets/js/clipboard.js" defer></script>
+  <script src="/assets/js/copycode.js" defer></script></head>
 <body><header class="site-header" role="banner">
 
   <script>
@@ -59,14 +59,173 @@
             </svg>
           </span>
       </label>
-
+      <div class="gs-search-border">
+        <div id="gs-search-icon"></div>
+        <form id="global-search-form">
+          <input id="global-search" type="text" title="Search" placeholder="Search" />
+          <div id="global-search-dropdown-container">
+            <button class="gs-current-version btn" type="button" data-toggle="dropdown">
+                <span id="gs-current-version-label">master</span>
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+          <span id="global-search-close">x</span>
+        </form>
+      </div>
       <div class="trigger">
+        <div id="global-search-mobile-border">
+          <div id="gs-search-icon-mobile"></div>
+          <input id="global-search-mobile" placeholder="Search..." type="text"/>
+          <div id="global-search-dropdown-container-mobile">
+            <button class="gs-current-version-mobile btn" type="button" data-toggle="dropdown">
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown-mobile">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+        </div>
         <a class="page-link" href="/get_started">Get Started</a>
         <a class="page-link" href="/blog">Blog</a>
         <a class="page-link" href="/features">Features</a>
         <a class="page-link" href="/ecosystem">Ecosystem</a>
         <a class="page-link" href="/api">Docs & Tutorials</a>
         <a class="page-link" href="https://github.com/apache/incubator-mxnet">GitHub</a>
+        <div class="dropdown">
+          <span class="dropdown-header">master
+            <svg class="dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
+          </span>
+          <div class="dropdown-content">
+            
+              
+                <a class="dropdown-option-active" href="/">master</a>
+              
+            
+              
+                <a href="/versions/1.6/">1.6</a>
+              
+            
+              
+                <a href="/versions/1.5.0/">1.5.0</a>
+              
+            
+              
+                <a href="/versions/1.4.1/">1.4.1</a>
+              
+            
+              
+                <a href="/versions/1.3.1/">1.3.1</a>
+              
+            
+              
+                <a href="/versions/1.2.1/">1.2.1</a>
+              
+            
+              
+                <a href="/versions/1.1.0/">1.1.0</a>
+              
+            
+              
+                <a href="/versions/1.0.0/">1.0.0</a>
+              
+            
+              
+                <a href="/versions/0.12.1/">0.12.1</a>
+              
+            
+              
+                <a href="/versions/0.11.0/">0.11.0</a>
+              
+            
+          </div>
+        </div>
       </div>
     </nav>
   </div>
@@ -128,8 +287,6 @@
             
               <!-- page-category -->
             
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/exception_handling">Exception Handling in MXNet</a></li>
               <!-- page-category -->
@@ -210,22 +367,6 @@
             
               <!-- page-category -->
             
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/note_data_loading">Efficient Data Loaders</a></li>
               <!-- page-category -->
@@ -238,14 +379,14 @@
             <li><a href="/api/architecture/note_memory">Memory Consumption</a></li>
               <!-- page-category -->
             
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/overview">MXNet System Architecture</a></li>
               <!-- page-category -->
             
               <!-- page-category -->
             
+              <!-- page-category -->
+            
             
             <li><a href="/api/architecture/program_model">Deep Learning Programming Paradigm</a></li>
               <!-- page-category -->
@@ -281,42 +422,25 @@
               <!-- page-category -->
             
               <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
                <!-- resource-p -->
         </ul>
     </div>
     <div class="col-9">
         <!--- Licensed to the Apache Software Foundation (ASF) under one -->
-
 <!--- or more contributor license agreements.  See the NOTICE file -->
-
 <!--- distributed with this work for additional information -->
-
 <!--- regarding copyright ownership.  The ASF licenses this file -->
-
 <!--- to you under the Apache License, Version 2.0 (the -->
-
 <!--- "License"); you may not use this file except in compliance -->
-
 <!--- with the License.  You may obtain a copy of the License at -->
 
 <!---   http://www.apache.org/licenses/LICENSE-2.0 -->
 
 <!--- Unless required by applicable law or agreed to in writing, -->
-
 <!--- software distributed under the License is distributed on an -->
-
 <!--- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -->
-
 <!--- KIND, either express or implied.  See the License for the -->
-
 <!--- specific language governing permissions and limitations -->
-
 <!--- under the License. -->
 
 <h1 id="optimizing-memory-consumption-in-deep-learning">Optimizing Memory Consumption in Deep Learning</h1>
@@ -325,7 +449,7 @@
 is towards deeper and larger networks.
 Despite rapid advances in hardware performance,
 cutting-edge deep learning models continue to push the limits of GPU RAM.
-So even today, it&#39;s always desirable to find ways
+So even today, it’s always desirable to find ways
 to train larger models while consuming less memory.
 Doing so enables us to train faster, using larger batch sizes,
 and consequently achieving a higher GPU utilization rate.</p>
@@ -339,14 +463,14 @@
 
 <h2 id="computation-graph">Computation Graph</h2>
 
-<p>First, let&#39;s revisit the idea of the computation graph.
+<p>First, let’s revisit the idea of the computation graph.
 A computation graph describes the (data flow) dependencies
 between the operations in the deep network.
 The operations performed in the graph
 can be either fine-grained or coarse-grained.
 The following figure shows two examples of computation graphs.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/comp_graph_example.png" alt="Comp Graph Example"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/comp_graph_example.png" alt="Comp Graph Example" /></p>
 
 <p>The concept of a computation graph is explicitly encoded in packages like Theano and CGT.
 In other libraries, computation graphs appear implicitly as network configuration files.
@@ -354,7 +478,7 @@
 There are mainly two ways: performing back-propagation on the <em>same</em> graph
 or explicitly representing a <em>backwards path</em> to calculate the required gradients.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/back_graph.png" alt="Backward Graph"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/back_graph.png" alt="Backward Graph" /></p>
 
 <p>Libraries like Caffe, CXXNet, and Torch take the former approach,
 performing back-prop on the original graph.
@@ -363,28 +487,28 @@
 In this discussion, we adopt the <em>explicit backward path</em> approach
 because it has several advantages for optimization.</p>
 
-<p>However, we should emphasize that choosing the explicit backward path approach doesn&#39;t restrict us
+<p>However, we should emphasize that choosing the explicit backward path approach doesn’t restrict us
 to symbolic libraries, such as Theano and CGT. We can also use the explicit backward path for gradient calculation of
 layer-based (which ties forward and backward together) libraries. The following graph shows how to do this.
-Basically, we introduce a backward node that links to the forward node of the graph and calls the <code>layer.backward</code>
+Basically, we introduce a backward node that links to the forward node of the graph and calls the <code class="highlighter-rouge">layer.backward</code>
 in the backward operations.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/explicit_back_layer.png" alt="Backward Layer"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/explicit_back_layer.png" alt="Backward Layer" /></p>
 
 <p>This discussion applies to almost all existing deep learning libraries.
 (There are differences between libraries,  e.g., higher-order differentiation, which is beyond the scope of this topic.)</p>
 
-<p>Why is the explicit backward path better? Let&#39;s explain it with two examples.
+<p>Why is the explicit backward path better? Let’s explain it with two examples.
 The first reason is that the explicit backward path
 clearly describes the dependency between computations.
 Consider the following case, where we want to get
 the gradient of A and B. As we can see clearly from the graph,
-the computation of the <code>d(C)</code> gradient doesn&#39;t depend on F.
-This means that we can free the memory of <code>F</code>
+the computation of the <code class="highlighter-rouge">d(C)</code> gradient doesn’t depend on F.
+This means that we can free the memory of <code class="highlighter-rouge">F</code>
 right after the forward computation is done.
-Similarly, the memory of <code>C</code> can be recycled.</p>
+Similarly, the memory of <code class="highlighter-rouge">C</code> can be recycled.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/back_dep_prune.png" alt="Backward Prune"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/back_dep_prune.png" alt="Backward Prune" /></p>
 
 <p>Another advantage of the explicit backward path
 is the ability to have a different backward path,
@@ -392,20 +516,20 @@
 A common example is the split connection case,
 as shown in the following figure.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/back_agg_grad.png" alt="Backward Agg"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/back_agg_grad.png" alt="Backward Agg" /></p>
 
 <p>In this example, the output of B is referenced by two operations.
 If we want to do the gradient calculation in the same
 network, we need to introduce an explicit split layer.
 This means we need to do the split for the forward pass, too.
-In this figure, the forward pass doesn&#39;t contain a split layer,
+In this figure, the forward pass doesn’t contain a split layer,
 but the graph will automatically insert a gradient
 aggregation node before passing the gradient back to B.
 This helps us to save the memory cost of allocating the output of the split layer,
 and the operation cost of replicating the data in the forward pass.</p>
 
 <p>If we adopt the explicit backward approach,
-there&#39;s no difference between the forward pass and the backward pass.
+there’s no difference between the forward pass and the backward pass.
 We simply step through the computation graph in chronological order
 and carry out computations.
 This makes the explicit backward approach easy to analyze.
@@ -416,22 +540,21 @@
 
 <p>As you can see, the computation graph is a useful way
 to discuss memory allocation optimization techniques.
-Already, we&#39;ve shown how you can save some memory
+Already, we’ve shown how you can save some memory
 by using the explicit backward graph.
-Now let&#39;s explore further optimizations,
+Now let’s explore further optimizations,
 and see how we might determine reasonable baselines for benchmarking.</p>
 
-<p>Assume that we want to build a neural network with <code>n</code> layers.
+<p>Assume that we want to build a neural network with <code class="highlighter-rouge">n</code> layers.
 Typically, when implementing a neural network,
 we need to allocate node space for both the output of each layer
 and the gradient values used during back-propagation.
-This means we need roughly <code>2 n</code> memory cells.
+This means we need roughly <code class="highlighter-rouge">2 n</code> memory cells.
 We face the same requirement when using the explicit backward graph approach
 because the number of nodes in a backward pass
 is roughly the same as in a forward pass.</p>
 
 <h3 id="in-place-operations">In-place Operations</h3>
-
 <p>One of the simplest techniques we can employ
 is <em>in-place memory sharing</em> across operations.
 For neural networks, we can usually apply this technique
@@ -439,95 +562,93 @@
 Consider the following case, where we want
 to compute the value of three chained sigmoid functions.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/alloc_inline.png" alt="Inplace op"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/alloc_inline.png" alt="Inplace op" /></p>
 
-<p>Because we can compute sigmoid <code>in-place</code>,
+<p>Because we can compute sigmoid <code class="highlighter-rouge">in-place</code>,
 using the same memory for input and output,
 we can compute an arbitrary-length chain
 of sigmoid functions using constant memory.</p>
 
-<p>Note: it&#39;s easy to make mistakes when implementing in-place optimization.
+<p>Note: it’s easy to make mistakes when implementing in-place optimization.
 Consider the following case, where the value of B is used not only by C, but also by F.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/alloc_inline_trap.png" alt="In-place trap"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/alloc_inline_trap.png" alt="In-place trap" /></p>
 
-<p>We can&#39;t perform in-place optimization because the value of B
-is still needed after <code>C=sigmoid(B)</code> is computed.
+<p>We can’t perform in-place optimization because the value of B
+is still needed after <code class="highlighter-rouge">C=sigmoid(B)</code> is computed.
 An algorithm that simply does in-place optimization
 for every sigmoid operation might fall into such trap,
 so we need to be careful about when we can use it.</p>
 
 <h3 id="standard-memory-sharing">Standard Memory Sharing</h3>
-
 <p>In-place operations are not the only places where we can share memory.
 In the following example, because the value of B is no longer needed
-after we compute E, we can reuse B&#39;s memory to hold the result of E.</p>
+after we compute E, we can reuse B’s memory to hold the result of E.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/alloc_normal.png" alt="Normal Sharing"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/alloc_normal.png" alt="Normal Sharing" /></p>
 
-<p><em>Memory sharing doesn&#39;t necessarily require the same data shape</em>.
-Note that in the preceding example, the shapes of <code>B</code> and <code>E</code> can differ.
+<p><em>Memory sharing doesn’t necessarily require the same data shape</em>.
+Note that in the preceding example, the shapes of <code class="highlighter-rouge">B</code> and <code class="highlighter-rouge">E</code> can differ.
 To handle such a situation, we can allocate a memory region
-of size equal to the maximum of that required by <code>B</code> and <code>E</code> and share it between them.</p>
+of size equal to the maximum of that required by <code class="highlighter-rouge">B</code> and <code class="highlighter-rouge">E</code> and share it between them.</p>
 
 <h3 id="example-of-real-neural-network-allocation">Example of Real Neural Network Allocation</h3>
-
 <p>Of course, these are only toy examples and they address only the computation of the forward pass.
 But the same ideas apply to real neural networks.
 The following figure shows an allocation plan for a two-layer perceptron.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/alloc_mlp.png" alt="Net Alloc"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/alloc_mlp.png" alt="Net Alloc" /></p>
 
 <p>In this example:</p>
 
 <ul>
-<li>In-place optimization is applied when computing <code>act1</code>, <code>d(fc1)</code>, <code>out</code> and <code>d(fc2)</code>.</li>
-<li>Memory is shared between <code>d(act1)</code> and <code>d(A)</code>.</li>
+  <li>In-place optimization is applied when computing <code class="highlighter-rouge">act1</code>, <code class="highlighter-rouge">d(fc1)</code>, <code class="highlighter-rouge">out</code> and <code class="highlighter-rouge">d(fc2)</code>.</li>
+  <li>Memory is shared between <code class="highlighter-rouge">d(act1)</code> and <code class="highlighter-rouge">d(A)</code>.</li>
 </ul>
 
 <h2 id="memory-allocation-algorithm">Memory Allocation Algorithm</h2>
 
-<p>So far, we&#39;ve discussed general techniques for optimizing memory allocation.
-We&#39;ve seen that there are traps to avoid,
+<p>So far, we’ve discussed general techniques for optimizing memory allocation.
+We’ve seen that there are traps to avoid,
 as demonstrated in the case of in-place memory optimization.
 So, how can we allocate memory correctly?
 This is not a new problem.
 For example, it is very similar
 to the problem with register allocation in compilers.
 There might be techniques that we can borrow.
-We&#39;re not attempting to give a comprehensive review of techniques here,
+We’re not attempting to give a comprehensive review of techniques here,
 but rather to introduce some simple
 but useful tricks to attack the problem.</p>
 
 <p>The key problem is that we need to place resources
-so that they don&#39;t conflict with each other.
+so that they don’t conflict with each other.
 More specifically, each variable has a <em>life time</em>
 between the time it gets computed until the last time it is used.
 In the case of the multi-layer perceptron,
-the <em>life time</em> of <code>fc1</code> ends after <code>act1</code> get computed.</p>
+the <em>life time</em> of <code class="highlighter-rouge">fc1</code> ends after <code class="highlighter-rouge">act1</code> get computed.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/alloc_mlp.png" alt="Net Alloc"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/alloc_mlp.png" alt="Net Alloc" /></p>
 
-<p>The principle is <em>to allow memory sharing only between variables whose lifetimes don&#39;t overlap</em>.
+<p>The principle is <em>to allow memory sharing only between variables whose lifetimes don’t overlap</em>.
 There are multiple ways to do this.
 You can construct the conflicting graph
 with each variable as a node and link the edge
 between variables with overlapping lifespans,
 and then run a graph-coloring algorithm.
-This likely has <code>$O(n^2)$</code> complexity,
-where <code>n</code> is the number of nodes in the graph.
+This likely has <code class="highlighter-rouge">$O(n^2)$</code> complexity,
+where <code class="highlighter-rouge">n</code> is the number of nodes in the graph.
 This might be too costly.</p>
 
-<p>Let&#39;s consider another simple heuristic.
+<p>Let’s consider another simple heuristic.
 The idea is to simulate the procedure of traversing the graph,
 and keep a count of future operations that depends on the node.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/alloc_step.png" alt="Alloc"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/alloc_step.png" alt="Alloc" /></p>
 
 <ul>
-<li>An in-place optimization can be performed when only the current operation depends on the source (i.e., <code>count==1</code>).</li>
-<li>Memory can be recycled into the box on the upper right corner when the <code>count</code> goes to 0.</li>
-<li>When we need new memory, we can either get it from the box or allocate a new one.</li>
+  <li>An in-place optimization can be performed when only the current operation depends on the source (i.e., <code class="highlighter-rouge">count==1</code>).</li>
+  <li>Memory can be recycled into the box on the upper right corner when the <code class="highlighter-rouge">count</code> goes to 0.</li>
+  <li>When we need new memory, we can either get it from the box or allocate a new one.</li>
 </ul>
 
 <p><strong><em>Note:</em></strong> During the simulation, no memory is allocated.
@@ -539,7 +660,7 @@
 <p>The preceding strategy exactly simulates
 the dynamic memory allocation procedure
 in imperative languages, such as Python.
-The <code>count</code> is the reference counter for each memory object,
+The <code class="highlighter-rouge">count</code> is the reference counter for each memory object,
 and the object gets garbage collected
 when the reference counter goes to 0.
 In that sense,
@@ -553,7 +674,7 @@
 For example, we can search for memory sizes
 that are similar to the required memory block.
 The Allocation can also be made graph aware.
-We&#39;ll talk about that in the next section.
+We’ll talk about that in the next section.
 Dynamic allocation puts more pressure
 on fast memory allocation and garbage collection.</p>
 
@@ -572,21 +693,20 @@
 for a computation graph to get a static allocation plan.
 However, optimizing for parallel computation presents other challenges
 because resource sharing and parallelization are on the two ends of a balance.
-Let&#39;s look at the following two allocation plans for the same graph:</p>
+Let’s look at the following two allocation plans for the same graph:</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/parallel_alloc.png" alt="Parallel Alloc"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/parallel_alloc.png" alt="Parallel Alloc" /></p>
 
 <p>Both allocation plans are valid
 if we run the computation serially,
-from <code>A[1]</code> to <code>A[8]</code>.
+from <code class="highlighter-rouge">A[1]</code> to <code class="highlighter-rouge">A[8]</code>.
 However, the allocation plan on the left
 introduces additional dependencies,
-which means we can&#39;t run computation of <code>A[2]</code> and <code>A[5]</code> in parallel.
+which means we can’t run computation of <code class="highlighter-rouge">A[2]</code> and <code class="highlighter-rouge">A[5]</code> in parallel.
 The plan on the right can.
 To parallelize computation, we need to take greater care.</p>
 
 <h3 id="be-correct-and-safe-first">Be Correct and Safe First</h3>
-
 <p>Being correct is our first principle.
 This means to execute in a way that takes implicit dependency
 memory sharing into consideration.
@@ -601,37 +721,36 @@
 This means never allocate the same memory
 to nodes that can be parallelized.
 This might not be ideal when memory reduction is more desirable,
-and we don&#39;t gain too much when we can get benefit
+and we don’t gain too much when we can get benefit
 from multiple computing streams simultaneously executing on the same GPU.</p>
 
 <h3 id="try-to-allow-more-parallelization">Try to Allow More Parallelization</h3>
-
 <p>Now we can safely perform some optimizations.
-The general idea is to try and encourage memory sharing between nodes that can&#39;t be parallelized.
+The general idea is to try and encourage memory sharing between nodes that can’t be parallelized.
 You can do this by creating an ancestor relationship
 graph and querying it during allocation,
-which costs approximately <code>$O(n^2)$</code> in time to construct.
+which costs approximately <code class="highlighter-rouge">$O(n^2)$</code> in time to construct.
 We can also use a heuristic here,
 for example, color the path in the graph.
 As shown in the following figure,
 when you try to find the longest paths in the graph,
 color them the same color and continue.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/graph_color.png" alt="Path Color"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/memory/graph_color.png" alt="Path Color" /></p>
 
 <p>After you get the color of the node,
 you allow sharing (or encourage sharing)
 only between nodes of the same color.
 This is a stricter version of the ancestor relationship,
-but it costs only <code>$O(n)$</code> of time
-if you search for only the first <code>k</code> path.</p>
+but it costs only <code class="highlighter-rouge">$O(n)$</code> of time
+if you search for only the first <code class="highlighter-rouge">k</code> path.</p>
 
 <p>This is by no means the only solution.
 More sophisticated approaches might exist:</p>
 
 <h2 id="how-much-can-you-save">How Much Can you Save?</h2>
 
-<p>We&#39;ve discussed the techniques and algorithms you can use
+<p>We’ve discussed the techniques and algorithms you can use
 to squeeze memory usage for deep learning.
 How much can you really save by using these techniques?</p>
 
@@ -642,19 +761,19 @@
 if you are optimizing a fine-grained computation network
 used by symbolic libraries, such as Theano. Most of the ideas in this article inspired the design of <em>MXNet</em>.</p>
 
-<p>Also, you will notice that memory cost, for forward pass only execution, is extremely low compared to running both forward and backward pass. This is simply because there&#39;s  more memory reuse if you run only the forward pass.</p>
+<p>Also, you will notice that memory cost, for forward pass only execution, is extremely low compared to running both forward and backward pass. This is simply because there’s  more memory reuse if you run only the forward pass.</p>
 
 <p>So here are two takeaways:</p>
 
 <ul>
-<li>Use a computation graph to allocate memory.</li>
-<li>For deep learning models, prediction consumes much less memory than training.</li>
+  <li>Use a computation graph to allocate memory.</li>
+  <li>For deep learning models, prediction consumes much less memory than training.</li>
 </ul>
 
 <h2 id="next-steps">Next Steps</h2>
 
 <ul>
-<li><a href="note_data_loading">Efficient Data Loading Module for Deep Learning</a></li>
+  <li><a href="note_data_loading">Efficient Data Loading Module for Deep Learning</a></li>
 </ul>
 
     </div>
@@ -671,8 +790,7 @@
             <div class="col-4">
                 <h4 class="footer-category-title">Resources</h4>
                 <ul class="contact-list">
-                    <li><a class="u-email" href="mailto:dev@mxnet.apache.org">Dev list</a></li>
-                    <li><a class="u-email" href="mailto:user@mxnet.apache.org">User mailing list</a></li>
+                    <li><a href="/community/contribute#mxnet-dev-communications">Mailing lists</a></li>
                     <li><a href="https://cwiki.apache.org/confluence/display/MXNET/Apache+MXNet+Home">Developer Wiki</a></li>
                     <li><a href="https://issues.apache.org/jira/projects/MXNET/issues">Jira Tracker</a></li>
                     <li><a href="https://github.com/apache/incubator-mxnet/labels/Roadmap">Github Roadmap</a></li>
diff --git a/api/architecture/overview.html b/api/architecture/overview.html
index 54a7f8c..1f6f1bb 100644
--- a/api/architecture/overview.html
+++ b/api/architecture/overview.html
@@ -5,23 +5,23 @@
   <meta name="viewport" content="width=device-width, initial-scale=1">
   <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"><!-- Begin Jekyll SEO tag v2.6.1 -->
 <title>MXNet System Architecture | Apache MXNet</title>
-<meta name="generator" content="Jekyll v3.8.6" />
+<meta name="generator" content="Jekyll v4.0.0" />
 <meta property="og:title" content="MXNet System Architecture" />
 <meta property="og:locale" content="en_US" />
 <meta name="description" content="A flexible and efficient library for deep learning." />
 <meta property="og:description" content="A flexible and efficient library for deep learning." />
-<link rel="canonical" href="https://mxnet.incubator.apache.org/api/architecture/overview" />
-<meta property="og:url" content="https://mxnet.incubator.apache.org/api/architecture/overview" />
+<link rel="canonical" href="https://mxnet-beta.staged.apache.org//api/architecture/overview" />
+<meta property="og:url" content="https://mxnet-beta.staged.apache.org//api/architecture/overview" />
 <meta property="og:site_name" content="Apache MXNet" />
 <script type="application/ld+json">
-{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"MXNet System Architecture","url":"https://mxnet.incubator.apache.org/api/architecture/overview","@context":"https://schema.org"}</script>
+{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"MXNet System Architecture","url":"https://mxnet-beta.staged.apache.org//api/architecture/overview","@context":"https://schema.org"}</script>
 <!-- End Jekyll SEO tag -->
 <script src="https://medium-widget.pixelpoint.io/widget.js"></script>
-  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet.incubator.apache.org/feed.xml" title="Apache MXNet" /><script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
-
-  <script src="/assets/js/clipboard.js"></script>
-  <script src="/assets/js/copycode.js"></script>
-</head>
+  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.css" />
+  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet-beta.staged.apache.org//feed.xml" title="Apache MXNet" /><script src="/assets/js/jquery-3.3.1.min.js"></script><script src="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.js" defer></script>
+  <script src="/assets/js/globalSearch.js" defer></script>
+  <script src="/assets/js/clipboard.js" defer></script>
+  <script src="/assets/js/copycode.js" defer></script></head>
 <body><header class="site-header" role="banner">
 
   <script>
@@ -59,14 +59,173 @@
             </svg>
           </span>
       </label>
-
+      <div class="gs-search-border">
+        <div id="gs-search-icon"></div>
+        <form id="global-search-form">
+          <input id="global-search" type="text" title="Search" placeholder="Search" />
+          <div id="global-search-dropdown-container">
+            <button class="gs-current-version btn" type="button" data-toggle="dropdown">
+                <span id="gs-current-version-label">master</span>
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+          <span id="global-search-close">x</span>
+        </form>
+      </div>
       <div class="trigger">
+        <div id="global-search-mobile-border">
+          <div id="gs-search-icon-mobile"></div>
+          <input id="global-search-mobile" placeholder="Search..." type="text"/>
+          <div id="global-search-dropdown-container-mobile">
+            <button class="gs-current-version-mobile btn" type="button" data-toggle="dropdown">
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown-mobile">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+        </div>
         <a class="page-link" href="/get_started">Get Started</a>
         <a class="page-link" href="/blog">Blog</a>
         <a class="page-link" href="/features">Features</a>
         <a class="page-link" href="/ecosystem">Ecosystem</a>
         <a class="page-link" href="/api">Docs & Tutorials</a>
         <a class="page-link" href="https://github.com/apache/incubator-mxnet">GitHub</a>
+        <div class="dropdown">
+          <span class="dropdown-header">master
+            <svg class="dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
+          </span>
+          <div class="dropdown-content">
+            
+              
+                <a class="dropdown-option-active" href="/">master</a>
+              
+            
+              
+                <a href="/versions/1.6/">1.6</a>
+              
+            
+              
+                <a href="/versions/1.5.0/">1.5.0</a>
+              
+            
+              
+                <a href="/versions/1.4.1/">1.4.1</a>
+              
+            
+              
+                <a href="/versions/1.3.1/">1.3.1</a>
+              
+            
+              
+                <a href="/versions/1.2.1/">1.2.1</a>
+              
+            
+              
+                <a href="/versions/1.1.0/">1.1.0</a>
+              
+            
+              
+                <a href="/versions/1.0.0/">1.0.0</a>
+              
+            
+              
+                <a href="/versions/0.12.1/">0.12.1</a>
+              
+            
+              
+                <a href="/versions/0.11.0/">0.11.0</a>
+              
+            
+          </div>
+        </div>
       </div>
     </nav>
   </div>
@@ -128,8 +287,6 @@
             
               <!-- page-category -->
             
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/exception_handling">Exception Handling in MXNet</a></li>
               <!-- page-category -->
@@ -210,22 +367,6 @@
             
               <!-- page-category -->
             
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/note_data_loading">Efficient Data Loaders</a></li>
               <!-- page-category -->
@@ -238,14 +379,14 @@
             <li><a href="/api/architecture/note_memory">Memory Consumption</a></li>
               <!-- page-category -->
             
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/overview">MXNet System Architecture</a></li>
               <!-- page-category -->
             
               <!-- page-category -->
             
+              <!-- page-category -->
+            
             
             <li><a href="/api/architecture/program_model">Deep Learning Programming Paradigm</a></li>
               <!-- page-category -->
@@ -281,78 +422,61 @@
               <!-- page-category -->
             
               <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
                <!-- resource-p -->
         </ul>
     </div>
     <div class="col-9">
         <!--- Licensed to the Apache Software Foundation (ASF) under one -->
-
 <!--- or more contributor license agreements.  See the NOTICE file -->
-
 <!--- distributed with this work for additional information -->
-
 <!--- regarding copyright ownership.  The ASF licenses this file -->
-
 <!--- to you under the Apache License, Version 2.0 (the -->
-
 <!--- "License"); you may not use this file except in compliance -->
-
 <!--- with the License.  You may obtain a copy of the License at -->
 
 <!---   http://www.apache.org/licenses/LICENSE-2.0 -->
 
 <!--- Unless required by applicable law or agreed to in writing, -->
-
 <!--- software distributed under the License is distributed on an -->
-
 <!--- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -->
-
 <!--- KIND, either express or implied.  See the License for the -->
-
 <!--- specific language governing permissions and limitations -->
-
 <!--- under the License. -->
 
 <h1 id="mxnet-system-architecture">MXNet System Architecture</h1>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/dmlc.github.io/master/img/mxnet/system/overview.png" alt="System Overview"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/dmlc.github.io/master/img/mxnet/system/overview.png" alt="System Overview" /></p>
 
 <p>This figure shows the major modules and components of the MXNet system and their interaction. The modules are:</p>
 
 <ul>
-<li>Runtime Dependency Engine: Schedules and executes the
+  <li>Runtime Dependency Engine: Schedules and executes the
 operations according to their read/write dependency.</li>
-<li>Storage Allocator: Efficiently allocates and recycles memory blocks
+  <li>Storage Allocator: Efficiently allocates and recycles memory blocks
 on host (CPU) and devices (GPUs).</li>
-<li>Resource Manager: Manages global resources, such as the random number generator
+  <li>Resource Manager: Manages global resources, such as the random number generator
 and temporal space.</li>
-<li>NDArray: Dynamic, asynchronous n-dimensional arrays,
+  <li>NDArray: Dynamic, asynchronous n-dimensional arrays,
 which provide flexible imperative programs for MXNet.</li>
-<li>Symbolic Execution: Static symbolic graph executor,
+  <li>Symbolic Execution: Static symbolic graph executor,
 which provides efficient symbolic graph execution and optimization.</li>
-<li>Operator: Operators that define static forward and gradient
+  <li>Operator: Operators that define static forward and gradient
 calculation (backprop).</li>
-<li>SimpleOp: Operators that extend NDArray operators and symbolic operators
+  <li>SimpleOp: Operators that extend NDArray operators and symbolic operators
 in a unified fashion.</li>
-<li>Symbol Construction: Symbolic construction, which provides a way to construct
+  <li>Symbol Construction: Symbolic construction, which provides a way to construct
 a computation graph (net configuration).</li>
-<li>KVStore: Key-value store interface for efficient parameter synchronization.</li>
-<li>Data Loading(IO): Efficient distributed data loading and augmentation.</li>
+  <li>KVStore: Key-value store interface for efficient parameter synchronization.</li>
+  <li>Data Loading(IO): Efficient distributed data loading and augmentation.</li>
 </ul>
 
 <h1 id="mxnet-system-components">MXNet System Components</h1>
 
 <h2 id="execution-engine">Execution Engine</h2>
 
-<p>You can use MXNet&#39;s engine not only for deep learning,
+<p>You can use MXNet’s engine not only for deep learning,
 but for any domain-specific problem.
-It&#39;s designed to solve a general problem:
+It’s designed to solve a general problem:
 execute a bunch of functions following their dependencies.
 Execution of any two functions with dependencies should be serialized.
 To boost performance, functions with no dependencies <em>can</em> be executed in parallel.
@@ -362,19 +486,20 @@
 <h3 id="interface">Interface</h3>
 
 <p>The following API is the core interface for the execution engine:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">    <span class="k">virtual</span> <span class="kt">void</span> <span class="n">PushSync</span><span class="p">(</span><span class="n">Fn</span> <span class="n">exec_fun</span><span class="p">,</span> <span class="n">Context</span> <span class="n">exec_ctx</span><span class="p">,</span>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">virtual</span> <span class="kt">void</span> <span class="n">PushSync</span><span class="p">(</span><span class="n">Fn</span> <span class="n">exec_fun</span><span class="p">,</span> <span class="n">Context</span> <span class="n">exec_ctx</span><span class="p">,</span>
                           <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">VarHandle</span><span class="o">&gt;</span> <span class="k">const</span><span class="o">&amp;</span> <span class="n">const_vars</span><span class="p">,</span>
                           <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">VarHandle</span><span class="o">&gt;</span> <span class="k">const</span><span class="o">&amp;</span> <span class="n">mutate_vars</span><span class="p">)</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span>
-</code></pre></div>
-<p>This API allows you to push a function (<code>exec_fun</code>),
+</code></pre></div></div>
+<p>This API allows you to push a function (<code class="highlighter-rouge">exec_fun</code>),
 along with its context information and dependencies, to the engine.
-<code>exec_ctx</code> is the context information in which the <code>exec_fun</code> should be executed,
-<code>const_vars</code> denotes the variables that the function reads from,
-and <code>mutate_vars</code> are the variables to be modified.
+<code class="highlighter-rouge">exec_ctx</code> is the context information in which the <code class="highlighter-rouge">exec_fun</code> should be executed,
+<code class="highlighter-rouge">const_vars</code> denotes the variables that the function reads from,
+and <code class="highlighter-rouge">mutate_vars</code> are the variables to be modified.
 The engine provides the following guarantee:</p>
 
 <blockquote>
-<p><em>The execution of any two functions
+  <p><em>The execution of any two functions
 that modify a common variable
 is serialized in their push order.</em></p>
 </blockquote>
@@ -382,107 +507,112 @@
 <h3 id="function">Function</h3>
 
 <p>The function type of the engine is:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">    <span class="k">using</span> <span class="n">Fn</span> <span class="o">=</span> <span class="n">std</span><span class="o">::</span><span class="n">function</span><span class="o">&lt;</span><span class="kt">void</span><span class="p">(</span><span class="n">RunContext</span><span class="p">)</span><span class="o">&gt;</span><span class="p">;</span>
-</code></pre></div>
-<p><code>RunContext</code> contains runtime information, which is determined by the engine:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">    <span class="k">struct</span> <span class="n">RunContext</span> <span class="p">{</span>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">using</span> <span class="n">Fn</span> <span class="o">=</span> <span class="n">std</span><span class="o">::</span><span class="n">function</span><span class="o">&lt;</span><span class="kt">void</span><span class="p">(</span><span class="n">RunContext</span><span class="p">)</span><span class="o">&gt;</span><span class="p">;</span>
+</code></pre></div></div>
+<p><code class="highlighter-rouge">RunContext</code> contains runtime information, which is determined by the engine:</p>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">struct</span> <span class="n">RunContext</span> <span class="p">{</span>
         <span class="c1">// stream pointer which could be safely cast to</span>
         <span class="c1">// cudaStream_t* type</span>
-        <span class="kt">void</span> <span class="o">*</span><span class="n">stream</span><span class="p">;</span>
+	    <span class="kt">void</span> <span class="o">*</span><span class="n">stream</span><span class="p">;</span>
     <span class="p">};</span>
-</code></pre></div>
-<p>Alternatively, you could use <code>mxnet::engine::DAGEngine::Fn</code>, which has the same type definition.</p>
+</code></pre></div></div>
+<p>Alternatively, you could use <code class="highlighter-rouge">mxnet::engine::DAGEngine::Fn</code>, which has the same type definition.</p>
 
-<p>All of the functions are executed by the engine&#39;s internal threads.
-In such a model, it&#39;s usually not a good idea to push <em>blocking</em> functions
+<p>All of the functions are executed by the engine’s internal threads.
+In such a model, it’s usually not a good idea to push <em>blocking</em> functions
 to the engine (usually for dealing with I/O tasks like disk, web service, UI, etc.)
 because it will occupy the execution thread and reduce total throughput.
 In that case, we provide another <em>asynchronous</em> function type:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">    <span class="k">using</span> <span class="n">Callback</span> <span class="o">=</span> <span class="n">std</span><span class="o">::</span><span class="n">function</span><span class="o">&lt;</span><span class="kt">void</span><span class="p">()</span><span class="o">&gt;</span><span class="p">;</span>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">using</span> <span class="n">Callback</span> <span class="o">=</span> <span class="n">std</span><span class="o">::</span><span class="n">function</span><span class="o">&lt;</span><span class="kt">void</span><span class="p">()</span><span class="o">&gt;</span><span class="p">;</span>
     <span class="k">using</span> <span class="n">AsyncFn</span> <span class="o">=</span> <span class="n">std</span><span class="o">::</span><span class="n">function</span><span class="o">&lt;</span><span class="kt">void</span><span class="p">(</span><span class="n">RunContext</span><span class="p">,</span> <span class="n">Callback</span><span class="p">)</span><span class="o">&gt;</span><span class="p">;</span>
-</code></pre></div>
-<p>In the <code>AsyncFn</code> function, you can pass the heavy part to your own threads
+</code></pre></div></div>
+<p>In the <code class="highlighter-rouge">AsyncFn</code> function, you can pass the heavy part to your own threads
 and safely exit the body of the function.
-The engine doesn&#39;t consider the function finished
-until the <code>Callback</code> function is called.</p>
+The engine doesn’t consider the function finished
+until the <code class="highlighter-rouge">Callback</code> function is called.</p>
 
 <h3 id="context">Context</h3>
 
-<p>You can specify the <code>Context</code> of the function to be executed within.
+<p>You can specify the <code class="highlighter-rouge">Context</code> of the function to be executed within.
 This usually includes whether the function should be run on a CPU or a GPU,
 and if you specify a GPU, which GPU to use.
-<code>Context</code> is different from <code>RunContext</code>.
-<code>Context</code> contains device type (GPU/CPU) and device id,
- while <code>RunContext</code> contains information that can be decided only during runtime,
+<code class="highlighter-rouge">Context</code> is different from <code class="highlighter-rouge">RunContext</code>.
+<code class="highlighter-rouge">Context</code> contains device type (GPU/CPU) and device id,
+ while <code class="highlighter-rouge">RunContext</code> contains information that can be decided only during runtime,
  for example, on which stream the function should be executed.</p>
 
 <h3 id="varhandle">VarHandle</h3>
 
-<p><code>VarHandle</code> is used to specify the dependencies of functions.
+<p><code class="highlighter-rouge">VarHandle</code> is used to specify the dependencies of functions.
 The MXNet engine is designed to be decoupled from other MXNet modules.
-So <code>VarHandle</code> is like an engine-provided token you use
+So <code class="highlighter-rouge">VarHandle</code> is like an engine-provided token you use
 to represent the external resources the functions can use or modify.
-It&#39;s designed to be lightweight, so creating,
+It’s designed to be lightweight, so creating,
 deleting, or copying a variable incurs little overhead.
 Upon pushing the functions, you need to specify the variables
-that will be used (immutable) in the <code>const_vars</code> vector,
-and the variables that will be modified (mutable) in the <code>mutate_vars</code> vector.
+that will be used (immutable) in the <code class="highlighter-rouge">const_vars</code> vector,
+and the variables that will be modified (mutable) in the <code class="highlighter-rouge">mutate_vars</code> vector.
 The engine uses one rule for resolving the dependencies among functions:</p>
 
 <blockquote>
-<p><em>The execution of any two functions when one of them modifies at least one common variable is serialized in their push order.</em></p>
+  <p><em>The execution of any two functions when one of them modifies at least one common variable is serialized in their push order.</em></p>
 </blockquote>
 
-<p>For example, if <code>Fn1</code> and <code>Fn2</code> both mutate <code>V2</code> then <code>Fn2</code>
-is guaranteed to be executed after <code>Fn1</code>
-if <code>Fn2</code> is pushed after <code>Fn1</code>.
-On the other hand, if <code>Fn1</code> and <code>Fn2</code> both use <code>V2</code>,
+<p>For example, if <code class="highlighter-rouge">Fn1</code> and <code class="highlighter-rouge">Fn2</code> both mutate <code class="highlighter-rouge">V2</code> then <code class="highlighter-rouge">Fn2</code>
+is guaranteed to be executed after <code class="highlighter-rouge">Fn1</code>
+if <code class="highlighter-rouge">Fn2</code> is pushed after <code class="highlighter-rouge">Fn1</code>.
+On the other hand, if <code class="highlighter-rouge">Fn1</code> and <code class="highlighter-rouge">Fn2</code> both use <code class="highlighter-rouge">V2</code>,
 their actual execution order could be random.</p>
 
 <p>This design allows the engine to schedule <em>state-mutating</em> operations in a manner
 that minimizes calls to allocate new memory.
 For example, the weight update function in DNN
-can now use the <code>+=</code> operator
+can now use the <code class="highlighter-rouge">+=</code> operator
 to update the weights in place,
 rather than generating a new weight array each time.</p>
 
-<p>To create a variable, use the <code>NewVar()</code> API.
-To delete a variable, use the <code>PushDelete</code> API.</p>
+<p>To create a variable, use the <code class="highlighter-rouge">NewVar()</code> API.
+To delete a variable, use the <code class="highlighter-rouge">PushDelete</code> API.</p>
 
 <h3 id="push-and-wait">Push and Wait</h3>
 
-<p><em>All <code>Push</code> APIs are asynchronous.</em> The API call returns immediately
-regardless of whether the pushed <code>Fn</code> is finished or not.
+<p><em>All <code class="highlighter-rouge">Push</code> APIs are asynchronous.</em> The API call returns immediately
+regardless of whether the pushed <code class="highlighter-rouge">Fn</code> is finished or not.
 This allows the engine to start computing at the same time
 as the user thread is pushing functions.
-<code>Push</code> APIs are not thread-safe.
+<code class="highlighter-rouge">Push</code> APIs are not thread-safe.
 To be specific, only one thread should make engine API calls at a time.</p>
 
-<p>If you want to wait for a specific <code>Fn</code> to finish,
+<p>If you want to wait for a specific <code class="highlighter-rouge">Fn</code> to finish,
 include a callback function in the closure,
-and call the function at the end of your <code>Fn</code>.</p>
+and call the function at the end of your <code class="highlighter-rouge">Fn</code>.</p>
 
-<p>If you want to wait for all <code>Fn</code>s
+<p>If you want to wait for all <code class="highlighter-rouge">Fn</code>s
 that involve (use or mutate) a certain variable to finish,
-use the <code>WaitForVar(var)</code> API.</p>
+use the <code class="highlighter-rouge">WaitForVar(var)</code> API.</p>
 
-<p>If you want to wait for all pushed <code>Fn</code>s to finish,
-use the <code>WaitForAll()</code> API.</p>
+<p>If you want to wait for all pushed <code class="highlighter-rouge">Fn</code>s to finish,
+use the <code class="highlighter-rouge">WaitForAll()</code> API.</p>
 
 <h3 id="save-object-creation-cost">Save Object Creation Cost</h3>
 
 <p>In some cases, you need to push several functions to the engine for a long period of time.
 If the computation of these functions is light,
 the overhead of copying lambdas and creating use/mutate variable lists becomes relatively high.
-We provide an API to create an <code>OprHandle</code> beforehand:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">    <span class="k">virtual</span> <span class="n">OprHandle</span> <span class="n">NewOperator</span><span class="p">(</span><span class="n">AsyncFn</span> <span class="n">fn</span><span class="p">,</span>
+We provide an API to create an <code class="highlighter-rouge">OprHandle</code> beforehand:</p>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">virtual</span> <span class="n">OprHandle</span> <span class="n">NewOperator</span><span class="p">(</span><span class="n">AsyncFn</span> <span class="n">fn</span><span class="p">,</span>
                                   <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">VarHandle</span><span class="o">&gt;</span> <span class="k">const</span><span class="o">&amp;</span> <span class="n">const_vars</span><span class="p">,</span>
                                   <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">VarHandle</span><span class="o">&gt;</span> <span class="k">const</span><span class="o">&amp;</span> <span class="n">mutate_vars</span><span class="p">)</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span>
-</code></pre></div>
-<p>You can keep pushing the <code>OprHandle</code> without repeatedly creating them:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">    <span class="k">virtual</span> <span class="kt">void</span> <span class="n">Push</span><span class="p">(</span><span class="n">OprHandle</span> <span class="n">op</span><span class="p">,</span> <span class="n">Context</span> <span class="n">exec_ctx</span><span class="p">)</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span>
-</code></pre></div>
-<p>To delete it, call the <code>DeleteOperator(OprHandle op)</code> API.
+</code></pre></div></div>
+<p>You can keep pushing the <code class="highlighter-rouge">OprHandle</code> without repeatedly creating them:</p>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">virtual</span> <span class="kt">void</span> <span class="n">Push</span><span class="p">(</span><span class="n">OprHandle</span> <span class="n">op</span><span class="p">,</span> <span class="n">Context</span> <span class="n">exec_ctx</span><span class="p">)</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span>
+</code></pre></div></div>
+<p>To delete it, call the <code class="highlighter-rouge">DeleteOperator(OprHandle op)</code> API.
 Ensure that the operator has finished computing before calling this API.</p>
 
 <h2 id="operators-in-mxnet">Operators in MXNet</h2>
@@ -491,174 +621,189 @@
 and auxiliary information that can aid the system in performing optimizations,
 like in-place updates and auto-derivatives.
 To understand the remainder of the document,
-we recommend that you familiarize yourself with the <code>mshadow</code> library,
-because all operators compute on the tensor-like structure <code>mshadow::TBlob</code>
+we recommend that you familiarize yourself with the <code class="highlighter-rouge">mshadow</code> library,
+because all operators compute on the tensor-like structure <code class="highlighter-rouge">mshadow::TBlob</code>
 provided by the system during runtime.</p>
 
-<p>MXNet&#39;s operator interface allows you to:</p>
+<p>MXNet’s operator interface allows you to:</p>
 
 <ul>
-<li>Reduce memory allocation cost by specifying in-place updates.</li>
-<li>Hide some internal arguments from Python to make it cleaner.</li>
-<li>Define the relationships among input tensors and output tensors,
+  <li>Reduce memory allocation cost by specifying in-place updates.</li>
+  <li>Hide some internal arguments from Python to make it cleaner.</li>
+  <li>Define the relationships among input tensors and output tensors,
 which allows the system to perform shape checking for you.</li>
-<li>Acquire additional temporary spaces from the system
-to perform computation (e.g., calling <code>cudnn</code> routines).</li>
+  <li>Acquire additional temporary spaces from the system
+to perform computation (e.g., calling <code class="highlighter-rouge">cudnn</code> routines).</li>
 </ul>
 
 <h3 id="operator-interface">Operator Interface</h3>
 
-<p><code>Forward</code> is the core operator interface:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">    <span class="k">virtual</span> <span class="kt">void</span> <span class="n">Forward</span><span class="p">(</span><span class="k">const</span> <span class="n">OpContext</span> <span class="o">&amp;</span><span class="n">ctx</span><span class="p">,</span>
+<p><code class="highlighter-rouge">Forward</code> is the core operator interface:</p>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">virtual</span> <span class="kt">void</span> <span class="n">Forward</span><span class="p">(</span><span class="k">const</span> <span class="n">OpContext</span> <span class="o">&amp;</span><span class="n">ctx</span><span class="p">,</span>
                          <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">TBlob</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">in_data</span><span class="p">,</span>
                          <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">OpReqType</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">req</span><span class="p">,</span>
                          <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">TBlob</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">out_data</span><span class="p">,</span>
                          <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">TBlob</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">aux_states</span><span class="p">)</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span>
-</code></pre></div>
-<p>The <code>OpContext</code> structure is:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">           <span class="k">struct</span> <span class="n">OpContext</span> <span class="p">{</span>
+</code></pre></div></div>
+<p>The <code class="highlighter-rouge">OpContext</code> structure is:</p>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>           <span class="k">struct</span> <span class="n">OpContext</span> <span class="p">{</span>
              <span class="kt">int</span> <span class="n">is_train</span><span class="p">;</span>
              <span class="n">RunContext</span> <span class="n">run_ctx</span><span class="p">;</span>
              <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">Resource</span><span class="o">&gt;</span> <span class="n">requested</span><span class="p">;</span>
            <span class="p">}</span>
-</code></pre></div>
+</code></pre></div></div>
 <p>It describes whether the operator is in the train or test phase,
-which device the operator should be run on (in <code>run_ctx</code>),
+which device the operator should be run on (in <code class="highlighter-rouge">run_ctx</code>),
 and requested resources (covered in the following sections).</p>
 
 <ul>
-<li><code>in_data</code> and <code>out_data</code> represent the input and output tensors, respectively.
+  <li><code class="highlighter-rouge">in_data</code> and <code class="highlighter-rouge">out_data</code> represent the input and output tensors, respectively.
 All of the tensor spaces have been allocated by the system.</li>
-<li><p><code>req</code> denotes how the computation results are written into the <code>out_data</code>.
-In other words, <code>req.size() == out_data.size()</code> and <code>req[i]</code>
-correspond to the write type of <code>out_data[i]</code>.</p></li>
-<li><p>The <code>OpReqType</code> is defined as:</p></li>
+  <li>
+    <p><code class="highlighter-rouge">req</code> denotes how the computation results are written into the <code class="highlighter-rouge">out_data</code>.
+In other words, <code class="highlighter-rouge">req.size() == out_data.size()</code> and <code class="highlighter-rouge">req[i]</code>
+correspond to the write type of <code class="highlighter-rouge">out_data[i]</code>.</p>
+  </li>
+  <li>The <code class="highlighter-rouge">OpReqType</code> is defined as:</li>
 </ul>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">           <span class="k">enum</span> <span class="n">OpReqType</span> <span class="p">{</span>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>           <span class="k">enum</span> <span class="n">OpReqType</span> <span class="p">{</span>
              <span class="n">kNullOp</span><span class="p">,</span>
              <span class="n">kWriteTo</span><span class="p">,</span>
              <span class="n">kWriteInplace</span><span class="p">,</span>
              <span class="n">kAddTo</span>
            <span class="p">};</span>
-</code></pre></div>
-<p>Normally, the types of all <code>out_data</code> should be <code>kWriteTo</code>,
-  meaning that the provided <code>out_data</code> tensor is a <em>raw</em> memory block,
+</code></pre></div></div>
+<p>Normally, the types of all <code class="highlighter-rouge">out_data</code> should be <code class="highlighter-rouge">kWriteTo</code>,
+  meaning that the provided <code class="highlighter-rouge">out_data</code> tensor is a <em>raw</em> memory block,
   so the operator should write results directly into it.
-  In some cases, for example when calculating the <code>gradient</code> tensor,
+  In some cases, for example when calculating the <code class="highlighter-rouge">gradient</code> tensor,
   it would be great if we could accumulate the result,
   rather than directly overwrite the tensor contents
   so that  no extra space needs to be created each time.
-  In such a case, the corresponding <code>req</code> type is set as <code>kAddTo</code>,
-  indicating that a <code>+=</code> should be called.</p>
+  In such a case, the corresponding <code class="highlighter-rouge">req</code> type is set as <code class="highlighter-rouge">kAddTo</code>,
+  indicating that a <code class="highlighter-rouge">+=</code> should be called.</p>
 
 <ul>
-<li><code>aux_states</code> is intentionally designed for auxiliary tensors used to help computation. Currently, it is useless.</li>
+  <li><code class="highlighter-rouge">aux_states</code> is intentionally designed for auxiliary tensors used to help computation. Currently, it is useless.</li>
 </ul>
 
-<p>Aside from the <code>Forward</code> operator, you could optionally implement the <code>Backward</code> interface:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">    <span class="k">virtual</span> <span class="kt">void</span> <span class="nf">Backward</span><span class="p">(</span><span class="k">const</span> <span class="n">OpContext</span> <span class="o">&amp;</span><span class="n">ctx</span><span class="p">,</span>
+<p>Aside from the <code class="highlighter-rouge">Forward</code> operator, you could optionally implement the <code class="highlighter-rouge">Backward</code> interface:</p>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">virtual</span> <span class="kt">void</span> <span class="nf">Backward</span><span class="p">(</span><span class="k">const</span> <span class="n">OpContext</span> <span class="o">&amp;</span><span class="n">ctx</span><span class="p">,</span>
                           <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">TBlob</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">out_grad</span><span class="p">,</span>
                           <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">TBlob</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">in_data</span><span class="p">,</span>
                           <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">TBlob</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">out_data</span><span class="p">,</span>
                           <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">OpReqType</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">req</span><span class="p">,</span>
                           <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">TBlob</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">in_grad</span><span class="p">,</span>
                           <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">TBlob</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">aux_states</span><span class="p">);</span>
-</code></pre></div>
-<p>This interface follows the same design principle as the <code>Forward</code> interface,
-except that <code>out_grad</code>, <code>in_data</code>, and <code>out_data</code> are given,
-and the operator computes <code>in_grad</code> as the results.
- The naming strategy is similar to Torch&#39;s convention,
+</code></pre></div></div>
+<p>This interface follows the same design principle as the <code class="highlighter-rouge">Forward</code> interface,
+except that <code class="highlighter-rouge">out_grad</code>, <code class="highlighter-rouge">in_data</code>, and <code class="highlighter-rouge">out_data</code> are given,
+and the operator computes <code class="highlighter-rouge">in_grad</code> as the results.
+ The naming strategy is similar to Torch’s convention,
  and can be summarized in following figure:</p>
 
 <p>[input/output semantics figure]</p>
 
 <p>Some operators might not require all of the following:
-<code>out_grad</code>, <code>in_data</code> and <code>out_data</code>.
-You can specify these dependencies with the <code>DeclareBackwardDependency</code> interface in <code>OperatorProperty</code>.</p>
+<code class="highlighter-rouge">out_grad</code>, <code class="highlighter-rouge">in_data</code> and <code class="highlighter-rouge">out_data</code>.
+You can specify these dependencies with the <code class="highlighter-rouge">DeclareBackwardDependency</code> interface in <code class="highlighter-rouge">OperatorProperty</code>.</p>
 
 <h3 id="operator-property">Operator Property</h3>
 
 <p>One convolution might have several implementations,
 and you might want to switch among them to achieve the best performance.
 Therefore, we separate the operator <em>semantic</em> interfaces
-from the implementation interface (<code>Operator</code> class)
-into the <code>OperatorProperty</code> class.
-The <code>OperatorProperty</code> interface consists of:</p>
+from the implementation interface (<code class="highlighter-rouge">Operator</code> class)
+into the <code class="highlighter-rouge">OperatorProperty</code> class.
+The <code class="highlighter-rouge">OperatorProperty</code> interface consists of:</p>
 
 <ul>
-<li><strong>InferShape:</strong></li>
+  <li><strong>InferShape:</strong></li>
 </ul>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">           <span class="k">virtual</span> <span class="kt">bool</span> <span class="n">InferShape</span><span class="p">(</span><span class="n">mxnet</span><span class="o">::</span><span class="n">ShapeVector</span> <span class="o">*</span><span class="n">in_shape</span><span class="p">,</span>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>           <span class="k">virtual</span> <span class="kt">bool</span> <span class="n">InferShape</span><span class="p">(</span><span class="n">mxnet</span><span class="o">::</span><span class="n">ShapeVector</span> <span class="o">*</span><span class="n">in_shape</span><span class="p">,</span>
                                    <span class="n">mxnet</span><span class="o">::</span><span class="n">ShapeVector</span> <span class="o">*</span><span class="n">out_shape</span><span class="p">,</span>
                                    <span class="n">mxnet</span><span class="o">::</span><span class="n">ShapeVector</span> <span class="o">*</span><span class="n">aux_shape</span><span class="p">)</span> <span class="k">const</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span>
-</code></pre></div>
-<p>This interface has two purposes:
-* Tell the system the size of each input and output tensor,
-  so it can allocate space for them before the <code>Forward</code> and <code>Backward</code> call.
-* Perform a size check to make sure that there isn&#39;t an obvious error before running.
-  The shape in <code>in_shape</code> is set by the system
-  (from the <code>out_shape</code> of the previous operators).
-  It returns <code>false</code> when there is not enough information
-  to infer shapes or throws an error when the shape is inconsistent.</p>
+</code></pre></div></div>
 
+<p>This interface has two purposes:</p>
 <ul>
-<li><strong>Request Resources:</strong> Operations like <code>cudnnConvolutionForward</code> need a work space for computation.
+  <li>Tell the system the size of each input and output tensor,
+so it can allocate space for them before the <code class="highlighter-rouge">Forward</code> and <code class="highlighter-rouge">Backward</code> call.</li>
+  <li>
+    <p>Perform a size check to make sure that there isn’t an obvious error before running.
+The shape in <code class="highlighter-rouge">in_shape</code> is set by the system
+(from the <code class="highlighter-rouge">out_shape</code> of the previous operators).
+It returns <code class="highlighter-rouge">false</code> when there is not enough information
+to infer shapes or throws an error when the shape is inconsistent.</p>
+  </li>
+  <li><strong>Request Resources:</strong> Operations like <code class="highlighter-rouge">cudnnConvolutionForward</code> need a work space for computation.
 If the system can manage that, it could then perform optimizations,
 like reuse the space, and so on.
 MXNet defines two interfaces to achieve this:</li>
 </ul>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">           <span class="k">virtual</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">ResourceRequest</span><span class="o">&gt;</span> <span class="n">ForwardResource</span><span class="p">(</span>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>           <span class="k">virtual</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">ResourceRequest</span><span class="o">&gt;</span> <span class="n">ForwardResource</span><span class="p">(</span>
                <span class="k">const</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">ShapeVector</span> <span class="o">&amp;</span><span class="n">in_shape</span><span class="p">)</span> <span class="k">const</span><span class="p">;</span>
            <span class="k">virtual</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">ResourceRequest</span><span class="o">&gt;</span> <span class="n">BackwardResource</span><span class="p">(</span>
                <span class="k">const</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">ShapeVector</span> <span class="o">&amp;</span><span class="n">in_shape</span><span class="p">)</span> <span class="k">const</span><span class="p">;</span>
-</code></pre></div>
-<p>The <code>ResourceRequest</code> structure (in <code>resource.h</code>) currently contains only a type flag:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">           <span class="k">struct</span> <span class="n">ResourceRequest</span> <span class="p">{</span>
+</code></pre></div></div>
+<p>The <code class="highlighter-rouge">ResourceRequest</code> structure (in <code class="highlighter-rouge">resource.h</code>) currently contains only a type flag:</p>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>           <span class="k">struct</span> <span class="n">ResourceRequest</span> <span class="p">{</span>
              <span class="k">enum</span> <span class="n">Type</span> <span class="p">{</span>
                <span class="n">kRandom</span><span class="p">,</span>  <span class="c1">// get a mshadow::Random&lt;xpu&gt; object</span>
                <span class="n">kTempSpace</span><span class="p">,</span>  <span class="c1">// request temporary space</span>
              <span class="p">};</span>
              <span class="n">Type</span> <span class="n">type</span><span class="p">;</span>
            <span class="p">};</span>
-</code></pre></div>
-<p>If <code>ForwardResource</code> and <code>BackwardResource</code> return non-empty arrays,
-  the system offers the corresponding resources through the <code>ctx</code> parameter
-  in the <code>Forward</code> and <code>Backward</code> interface of <code>Operator</code>.
+</code></pre></div></div>
+<p>If <code class="highlighter-rouge">ForwardResource</code> and <code class="highlighter-rouge">BackwardResource</code> return non-empty arrays,
+  the system offers the corresponding resources through the <code class="highlighter-rouge">ctx</code> parameter
+  in the <code class="highlighter-rouge">Forward</code> and <code class="highlighter-rouge">Backward</code> interface of <code class="highlighter-rouge">Operator</code>.
   Basically, to access those resources, simply write:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">           <span class="k">auto</span> <span class="n">tmp_space_res</span> <span class="o">=</span> <span class="n">ctx</span><span class="p">.</span><span class="n">requested</span><span class="p">[</span><span class="n">kTempSpace</span><span class="p">].</span><span class="n">get_space</span><span class="p">(</span><span class="n">some_shape</span><span class="p">,</span> <span class="n">some_stream</span><span class="p">);</span>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>           <span class="k">auto</span> <span class="n">tmp_space_res</span> <span class="o">=</span> <span class="n">ctx</span><span class="p">.</span><span class="n">requested</span><span class="p">[</span><span class="n">kTempSpace</span><span class="p">].</span><span class="n">get_space</span><span class="p">(</span><span class="n">some_shape</span><span class="p">,</span> <span class="n">some_stream</span><span class="p">);</span>
            <span class="k">auto</span> <span class="n">rand_res</span> <span class="o">=</span> <span class="n">ctx</span><span class="p">.</span><span class="n">requested</span><span class="p">[</span><span class="n">kRandom</span><span class="p">].</span><span class="n">get_random</span><span class="p">(</span><span class="n">some_stream</span><span class="p">);</span>
-</code></pre></div>
-<p>For an example, see <code>src/operator/cudnn_convolution-inl.h</code>.</p>
+</code></pre></div></div>
+<p>For an example, see <code class="highlighter-rouge">src/operator/cudnn_convolution-inl.h</code>.</p>
 
 <ul>
-<li><strong>Backward dependency:</strong> Let&#39;s look at two different operator signatures
+  <li><strong>Backward dependency:</strong> Let’s look at two different operator signatures
 (we name all of the arguments for demonstration purposes):</li>
 </ul>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">           <span class="kt">void</span> <span class="nf">FullyConnectedForward</span><span class="p">(</span><span class="n">TBlob</span> <span class="n">weight</span><span class="p">,</span> <span class="n">TBlob</span> <span class="n">in_data</span><span class="p">,</span> <span class="n">TBlob</span> <span class="n">out_data</span><span class="p">);</span>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>           <span class="kt">void</span> <span class="nf">FullyConnectedForward</span><span class="p">(</span><span class="n">TBlob</span> <span class="n">weight</span><span class="p">,</span> <span class="n">TBlob</span> <span class="n">in_data</span><span class="p">,</span> <span class="n">TBlob</span> <span class="n">out_data</span><span class="p">);</span>
            <span class="kt">void</span> <span class="nf">FullyConnectedBackward</span><span class="p">(</span><span class="n">TBlob</span> <span class="n">weight</span><span class="p">,</span> <span class="n">TBlob</span> <span class="n">in_data</span><span class="p">,</span> <span class="n">TBlob</span> <span class="n">out_grad</span><span class="p">,</span> <span class="n">TBlob</span> <span class="n">in_grad</span><span class="p">);</span>
 
            <span class="kt">void</span> <span class="nf">PoolingForward</span><span class="p">(</span><span class="n">TBlob</span> <span class="n">in_data</span><span class="p">,</span> <span class="n">TBlob</span> <span class="n">out_data</span><span class="p">);</span>
            <span class="kt">void</span> <span class="nf">PoolingBackward</span><span class="p">(</span><span class="n">TBlob</span> <span class="n">in_data</span><span class="p">,</span> <span class="n">TBlob</span> <span class="n">out_data</span><span class="p">,</span> <span class="n">TBlob</span> <span class="n">out_grad</span><span class="p">,</span> <span class="n">TBlob</span> <span class="n">in_grad</span><span class="p">);</span>
-</code></pre></div>
-<p>Note that <code>out_data</code> in <code>FullyConnectedForward</code>
-  is not used by <code>FullyConnectedBackward</code>,
-  while <code>PoolingBackward</code> requires all of the arguments of <code>PoolingForward</code>.
-  Therefore, for <code>FullyConnectedForward</code>,
-  the <code>out_data</code> tensor once consumed could be safely freed
+</code></pre></div></div>
+<p>Note that <code class="highlighter-rouge">out_data</code> in <code class="highlighter-rouge">FullyConnectedForward</code>
+  is not used by <code class="highlighter-rouge">FullyConnectedBackward</code>,
+  while <code class="highlighter-rouge">PoolingBackward</code> requires all of the arguments of <code class="highlighter-rouge">PoolingForward</code>.
+  Therefore, for <code class="highlighter-rouge">FullyConnectedForward</code>,
+  the <code class="highlighter-rouge">out_data</code> tensor once consumed could be safely freed
   because the backward function will not need it.
   This provides a chance for the system to collect some tensors
   as garbage as soon as possible.
   To specify this situation, we provide an interface:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">          <span class="k">virtual</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">int</span><span class="o">&gt;</span> <span class="n">DeclareBackwardDependency</span><span class="p">(</span>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>          <span class="k">virtual</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">int</span><span class="o">&gt;</span> <span class="n">DeclareBackwardDependency</span><span class="p">(</span>
                <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">int</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">out_grad</span><span class="p">,</span>
                <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">int</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">in_data</span><span class="p">,</span>
                <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">int</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">out_data</span><span class="p">)</span> <span class="k">const</span><span class="p">;</span>
-</code></pre></div>
-<p>The <code>int</code> element of the argument vector is an ID
+</code></pre></div></div>
+<p>The <code class="highlighter-rouge">int</code> element of the argument vector is an ID
   to distinguish different arrays.
-  Let&#39;s see how this interface specifies different dependencies
-  for <code>FullyConnected</code> and <code>Pooling</code>:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">           <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">int</span><span class="o">&gt;</span> <span class="n">FullyConnectedProperty</span><span class="o">::</span><span class="n">DeclareBackwardDependency</span><span class="p">(</span>
+  Let’s see how this interface specifies different dependencies
+  for <code class="highlighter-rouge">FullyConnected</code> and <code class="highlighter-rouge">Pooling</code>:</p>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>           <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">int</span><span class="o">&gt;</span> <span class="n">FullyConnectedProperty</span><span class="o">::</span><span class="n">DeclareBackwardDependency</span><span class="p">(</span>
                <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">int</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">out_grad</span><span class="p">,</span>
                <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">int</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">in_data</span><span class="p">,</span>
                <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">int</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">out_data</span><span class="p">)</span> <span class="k">const</span> <span class="p">{</span>
@@ -670,15 +815,17 @@
                <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">int</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">out_data</span><span class="p">)</span> <span class="k">const</span> <span class="p">{</span>
              <span class="k">return</span> <span class="p">{</span><span class="n">out_grad</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">in_data</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">out_data</span><span class="p">[</span><span class="mi">0</span><span class="p">]};</span>
            <span class="p">}</span>
-</code></pre></div>
+</code></pre></div></div>
+
 <ul>
-<li><strong>In place Option:</strong> To further save the cost of memory allocation,
+  <li><strong>In place Option:</strong> To further save the cost of memory allocation,
 you can use in-place updates.
 They are appropriate for element-wise operations
 when the input tensor and output tensor have the same shape.
 You specify and in-place update with the following interface:</li>
 </ul>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">           <span class="k">virtual</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">std</span><span class="o">::</span><span class="n">pair</span><span class="o">&lt;</span><span class="kt">int</span><span class="p">,</span> <span class="kt">void</span><span class="o">*&gt;&gt;</span>    <span class="n">ElewiseOpProperty</span><span class="o">::</span><span class="n">ForwardInplaceOption</span><span class="p">(</span>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>           <span class="k">virtual</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">std</span><span class="o">::</span><span class="n">pair</span><span class="o">&lt;</span><span class="kt">int</span><span class="p">,</span> <span class="kt">void</span><span class="o">*&gt;&gt;</span>    <span class="n">ElewiseOpProperty</span><span class="o">::</span><span class="n">ForwardInplaceOption</span><span class="p">(</span>
                <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">int</span><span class="o">&gt;</span> <span class="o">&amp;</span><span class="n">in_data</span><span class="p">,</span>
                <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">void</span><span class="o">*&gt;</span> <span class="o">&amp;</span><span class="n">out_data</span><span class="p">)</span> <span class="k">const</span> <span class="p">{</span>
              <span class="k">return</span> <span class="p">{</span> <span class="p">{</span><span class="n">in_data</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">out_data</span><span class="p">[</span><span class="mi">0</span><span class="p">]}</span> <span class="p">};</span>
@@ -690,17 +837,18 @@
                <span class="k">const</span> <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="kt">void</span><span class="o">*&gt;</span> <span class="o">&amp;</span><span class="n">in_grad</span><span class="p">)</span> <span class="k">const</span> <span class="p">{</span>
              <span class="k">return</span> <span class="p">{</span> <span class="p">{</span><span class="n">out_grad</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">in_grad</span><span class="p">[</span><span class="mi">0</span><span class="p">]}</span> <span class="p">}</span>
            <span class="p">}</span>
-</code></pre></div>
-<p>This tells the system that the <code>in_data[0]</code> and <code>out_data[0]</code> tensors could share the same memory spaces during <code>Forward</code>, and so do <code>out_grad[0]</code> and <code>in_grad[0]</code> during <code>Backward</code>.</p>
+</code></pre></div></div>
+<p>This tells the system that the <code class="highlighter-rouge">in_data[0]</code> and <code class="highlighter-rouge">out_data[0]</code> tensors could share the same memory spaces during <code class="highlighter-rouge">Forward</code>, and so do <code class="highlighter-rouge">out_grad[0]</code> and <code class="highlighter-rouge">in_grad[0]</code> during <code class="highlighter-rouge">Backward</code>.</p>
 
 <blockquote>
-<p><strong>Important:</strong> Even if you use the preceding specification, it&#39;s <em>not</em> guaranteed that the input and output tensors will share the same space. In fact, this is only a suggestion for the system, which makes the final decision. However, in either case, the decision is completely transparent to you, so the actual <code>Forward</code> and <code>Backward</code> implementation does not need to consider that.</p>
+  <p><strong>Important:</strong> Even if you use the preceding specification, it’s <em>not</em> guaranteed that the input and output tensors will share the same space. In fact, this is only a suggestion for the system, which makes the final decision. However, in either case, the decision is completely transparent to you, so the actual <code class="highlighter-rouge">Forward</code> and <code class="highlighter-rouge">Backward</code> implementation does not need to consider that.</p>
 </blockquote>
 
 <ul>
-<li><strong>Expose Operator to Python:</strong> Because of the restrictions of C++, you need user to implement following interfaces:</li>
+  <li><strong>Expose Operator to Python:</strong> Because of the restrictions of C++, you need user to implement following interfaces:</li>
 </ul>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">           <span class="c1">// initial the property class from a list of key-value string pairs</span>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>           <span class="c1">// initial the property class from a list of key-value string pairs</span>
            <span class="k">virtual</span> <span class="kt">void</span> <span class="n">Init</span><span class="p">(</span><span class="k">const</span> <span class="n">vector</span><span class="o">&lt;</span><span class="n">pair</span><span class="o">&lt;</span><span class="n">string</span><span class="p">,</span> <span class="n">string</span><span class="o">&gt;&gt;</span> <span class="o">&amp;</span><span class="n">kwargs</span><span class="p">)</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span>
            <span class="c1">// return the parameters in a key-value string map</span>
            <span class="k">virtual</span> <span class="n">map</span><span class="o">&lt;</span><span class="n">string</span><span class="p">,</span> <span class="n">string</span><span class="o">&gt;</span> <span class="n">GetParams</span><span class="p">()</span> <span class="k">const</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span>
@@ -714,18 +862,20 @@
            <span class="k">virtual</span> <span class="kt">int</span> <span class="n">NumOutputs</span><span class="p">()</span> <span class="k">const</span><span class="p">;</span>
            <span class="c1">// return the number of visible outputs</span>
            <span class="k">virtual</span> <span class="kt">int</span> <span class="n">NumVisibleOutputs</span><span class="p">()</span> <span class="k">const</span><span class="p">;</span>
-</code></pre></div>
+</code></pre></div></div>
+
 <h3 id="create-an-operator-from-the-operator-property">Create an Operator from the Operator Property</h3>
 
-<p><code>OperatorProperty</code> includes all <em>semantic</em> attributes of an operation. It&#39;s also responsible for creating the <code>Operator</code> pointer for actual computation.</p>
+<p><code class="highlighter-rouge">OperatorProperty</code> includes all <em>semantic</em> attributes of an operation. It’s also responsible for creating the <code class="highlighter-rouge">Operator</code> pointer for actual computation.</p>
 
 <h4 id="create-operator">Create Operator</h4>
+<p>Implement the following interface in <code class="highlighter-rouge">OperatorProperty</code>:</p>
 
-<p>Implement the following interface in <code>OperatorProperty</code>:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">    <span class="k">virtual</span> <span class="n">Operator</span><span class="o">*</span> <span class="n">CreateOperator</span><span class="p">(</span><span class="n">Context</span> <span class="n">ctx</span><span class="p">)</span> <span class="k">const</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span>
-</code></pre></div>
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">virtual</span> <span class="n">Operator</span><span class="o">*</span> <span class="n">CreateOperator</span><span class="p">(</span><span class="n">Context</span> <span class="n">ctx</span><span class="p">)</span> <span class="k">const</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span>
+</code></pre></div></div>
 <p>For example:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">    <span class="k">class</span> <span class="nc">ConvolutionOp</span> <span class="p">{</span>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">class</span> <span class="nc">ConvolutionOp</span> <span class="p">{</span>
      <span class="nl">public:</span>
       <span class="kt">void</span> <span class="n">Forward</span><span class="p">(</span> <span class="p">...</span> <span class="p">)</span> <span class="p">{</span> <span class="p">...</span> <span class="p">}</span>
       <span class="kt">void</span> <span class="n">Backward</span><span class="p">(</span> <span class="p">...</span> <span class="p">)</span> <span class="p">{</span> <span class="p">...</span> <span class="p">}</span>
@@ -736,23 +886,25 @@
         <span class="k">return</span> <span class="k">new</span> <span class="n">ConvolutionOp</span><span class="p">;</span>
       <span class="p">}</span>
     <span class="p">};</span>
-</code></pre></div>
-<h4 id="parametrize-operator">Parametrize Operator</h4>
+</code></pre></div></div>
 
+<h4 id="parametrize-operator">Parametrize Operator</h4>
 <p>When implementing a convolution operator, you need to know the kernel size,
 the stride size, padding size, and so on.
 These parameters should be passed to the operator
-before any <code>Forward</code> or <code>Backward</code> interface is called.
-To do so, you could define a <code>ConvolutionParam</code> structure, as follows:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">    <span class="cp">#include &lt;dmlc/parameter.h&gt;
+before any <code class="highlighter-rouge">Forward</code> or <code class="highlighter-rouge">Backward</code> interface is called.
+To do so, you could define a <code class="highlighter-rouge">ConvolutionParam</code> structure, as follows:</p>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="cp">#include &lt;dmlc/parameter.h&gt;
 </span>    <span class="k">struct</span> <span class="n">ConvolutionParam</span> <span class="o">:</span> <span class="k">public</span> <span class="n">dmlc</span><span class="o">::</span><span class="n">Parameter</span><span class="o">&lt;</span><span class="n">ConvolutionParam</span><span class="o">&gt;</span> <span class="p">{</span>
       <span class="n">mxnet</span><span class="o">::</span><span class="n">TShape</span> <span class="n">kernel</span><span class="p">,</span> <span class="n">stride</span><span class="p">,</span> <span class="n">pad</span><span class="p">;</span>
       <span class="kt">uint32_t</span> <span class="n">num_filter</span><span class="p">,</span> <span class="n">num_group</span><span class="p">,</span> <span class="n">workspace</span><span class="p">;</span>
       <span class="kt">bool</span> <span class="n">no_bias</span><span class="p">;</span>
     <span class="p">};</span>
-</code></pre></div>
-<p>Put it in <code>ConvolutionOpProperty</code>, and pass it to the operator class during construction:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">    <span class="k">class</span> <span class="nc">ConvolutionOp</span> <span class="p">{</span>
+</code></pre></div></div>
+<p>Put it in <code class="highlighter-rouge">ConvolutionOpProperty</code>, and pass it to the operator class during construction:</p>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">class</span> <span class="nc">ConvolutionOp</span> <span class="p">{</span>
      <span class="nl">public:</span>
       <span class="n">ConvolutionOp</span><span class="p">(</span><span class="n">ConvolutionParam</span> <span class="n">p</span><span class="p">)</span><span class="o">:</span> <span class="n">param_</span><span class="p">(</span><span class="n">p</span><span class="p">)</span> <span class="p">{}</span>
       <span class="kt">void</span> <span class="n">Forward</span><span class="p">(</span> <span class="p">...</span> <span class="p">)</span> <span class="p">{</span> <span class="p">...</span> <span class="p">}</span>
@@ -771,39 +923,39 @@
      <span class="nl">private:</span>
       <span class="n">ConvolutionParam</span> <span class="n">param_</span><span class="p">;</span>
     <span class="p">};</span>
-</code></pre></div>
-<h4 id="register-the-operator-property-class-and-the-parameter-class-to-mxnet">Register the Operator Property Class and the Parameter Class to MXNet</h4>
+</code></pre></div></div>
 
+<h4 id="register-the-operator-property-class-and-the-parameter-class-to-mxnet">Register the Operator Property Class and the Parameter Class to MXNet</h4>
 <p>Use the following macros to register the parameter structure and the operator property class to MXNet:</p>
-<div class="highlight"><pre><code class="language-c++" data-lang="c++">    <span class="n">DMLC_REGISTER_PARAMETER</span><span class="p">(</span><span class="n">ConvolutionParam</span><span class="p">);</span>
+
+<div class="language-c++ highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="n">DMLC_REGISTER_PARAMETER</span><span class="p">(</span><span class="n">ConvolutionParam</span><span class="p">);</span>
     <span class="n">MXNET_REGISTER_OP_PROPERTY</span><span class="p">(</span><span class="n">Convolution</span><span class="p">,</span> <span class="n">ConvolutionOpProperty</span><span class="p">);</span>
-</code></pre></div>
+</code></pre></div></div>
 <p>The first argument is the name string, the second is the property class name.</p>
 
 <h3 id="interface-summary">Interface Summary</h3>
 
-<p>We&#39;ve almost covered the entire interface required to define a new operator. Let&#39;s do a recap:</p>
+<p>We’ve almost covered the entire interface required to define a new operator. Let’s do a recap:</p>
 
 <ul>
-<li>Use the <code>Operator</code> interface to write your computation logic (<code>Forward</code> and <code>Backward</code>).</li>
-<li>Use the <code>OperatorProperty</code> interface to:
-
-<ul>
-<li>Pass the parameter to the operator class (you can use the <code>Init</code> interface).</li>
-<li>Create an operator using the <code>CreateOperator</code> interface.</li>
-<li>Correctly implement the operator description interface, such as the names of arguments, etc.</li>
-<li>Correctly implement the <code>InferShape</code> interface to set the output tensor shape.</li>
-<li>[Optional] If additional resources are needed, check <code>ForwardResource</code> and <code>BackwardResource</code>.</li>
-<li>[Optional] If <code>Backward</code> doesn&#39;t need all of the input and output of <code>Forward</code>, check <code>DeclareBackwardDependency</code>.</li>
-<li>[Optional] If in-place update is supported, check <code>ForwardInplaceOption</code> and <code>BackwardInplaceOption</code>.</li>
-</ul></li>
-<li>Register the <code>OperatorProperty</code> class and the parameter class.</li>
+  <li>Use the <code class="highlighter-rouge">Operator</code> interface to write your computation logic (<code class="highlighter-rouge">Forward</code> and <code class="highlighter-rouge">Backward</code>).</li>
+  <li>Use the <code class="highlighter-rouge">OperatorProperty</code> interface to:
+    <ul>
+      <li>Pass the parameter to the operator class (you can use the <code class="highlighter-rouge">Init</code> interface).</li>
+      <li>Create an operator using the <code class="highlighter-rouge">CreateOperator</code> interface.</li>
+      <li>Correctly implement the operator description interface, such as the names of arguments, etc.</li>
+      <li>Correctly implement the <code class="highlighter-rouge">InferShape</code> interface to set the output tensor shape.</li>
+      <li>[Optional] If additional resources are needed, check <code class="highlighter-rouge">ForwardResource</code> and <code class="highlighter-rouge">BackwardResource</code>.</li>
+      <li>[Optional] If <code class="highlighter-rouge">Backward</code> doesn’t need all of the input and output of <code class="highlighter-rouge">Forward</code>, check <code class="highlighter-rouge">DeclareBackwardDependency</code>.</li>
+      <li>[Optional] If in-place update is supported, check <code class="highlighter-rouge">ForwardInplaceOption</code> and <code class="highlighter-rouge">BackwardInplaceOption</code>.</li>
+    </ul>
+  </li>
+  <li>Register the <code class="highlighter-rouge">OperatorProperty</code> class and the parameter class.</li>
 </ul>
 
 <h2 id="unifying-the-ndarray-operator-and-symbolic-operator">Unifying the NDArray Operator and Symbolic Operator</h2>
-
 <p>NDArray operations are similar to symbolic operations,
-except that sometimes you can&#39;t write in place to the operands
+except that sometimes you can’t write in place to the operands
 without a complete dependency graph.
 However, the logic underlying NDArray and symbolic operations are almost identical.
 <em>SimpleOp</em>, a new unified operator API,
@@ -816,7 +968,7 @@
 <p>Consider the elements of an operation.
 Ideally, you need only functions and derivatives
 to describe an operation.
-Let&#39;s restrict that to the space of unary and binary operations.
+Let’s restrict that to the space of unary and binary operations.
 How do we classify all operations to maximize the possibility
 of in-place write optimization?
 Note that you can separate functions by the number of operands.
@@ -829,66 +981,69 @@
 <p>Before you learn more about the SimpleOp interface,
  we recommend that you review the
  <a href="https://github.com/dmlc/mshadow/tree/master/guide">mshadow library guide</a>
- because  calculations will be done in the <code>mshadow::TBlob</code> structure.</p>
+ because  calculations will be done in the <code class="highlighter-rouge">mshadow::TBlob</code> structure.</p>
 
-<p>In the following example, we&#39;ll create an operator
+<p>In the following example, we’ll create an operator
 functioning as a smooth l1 loss,
 which is a mixture of l1 loss and l2 loss. The loss itself can be written as:</p>
-<div class="highlight"><pre><code class="language-" data-lang="">    loss = outside_weight .* f(inside_weight .* (data - label))
+
+<div class="highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    loss = outside_weight .* f(inside_weight .* (data - label))
     grad = outside_weight .* inside_weight .* f'(inside_weight .* (data - label))
-</code></pre></div>
-<p><code>.*</code> stands for element-wise multiplication, and <code>f</code>, <code>f&#39;</code> is the smooth l1 loss function,
-which we are assuming is in <code>mshadow</code> for now.
-At first glance, it&#39;s impossible to implement
+</code></pre></div></div>
+<p><code class="highlighter-rouge">.*</code> stands for element-wise multiplication, and <code class="highlighter-rouge">f</code>, <code class="highlighter-rouge">f'</code> is the smooth l1 loss function,
+which we are assuming is in <code class="highlighter-rouge">mshadow</code> for now.
+At first glance, it’s impossible to implement
 this particular loss as a unary or binary operator.
 But we have automatic differentiation in symbolic execution.
-That simplifies the loss to <code>f</code> and <code>f&#39;</code> directly.
-This loss is no more complex than a <code>sin</code> or an <code>abs</code> function,
+That simplifies the loss to <code class="highlighter-rouge">f</code> and <code class="highlighter-rouge">f'</code> directly.
+This loss is no more complex than a <code class="highlighter-rouge">sin</code> or an <code class="highlighter-rouge">abs</code> function,
 and can certainly be implemented as a unary operator.</p>
 
 <h2 id="simpleop-the-unified-operator-api">SimpleOp: The Unified Operator API</h2>
-
 <h3 id="define-shapes">Define Shapes</h3>
-
-<p>The <code>mshadow</code> library requires explicit memory allocation.
+<p>The <code class="highlighter-rouge">mshadow</code> library requires explicit memory allocation.
 As a consequence, all data shapes
 must be provided before any calculation occurs.
  Before we proceed with defining functions and gradient,
-let&#39;s check input data shape consistency and provide output shape.</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="k">typedef</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">TShape</span> <span class="p">(</span><span class="o">*</span><span class="n">UnaryShapeFunction</span><span class="p">)(</span><span class="k">const</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">TShape</span><span class="o">&amp;</span> <span class="n">src</span><span class="p">,</span>
+let’s check input data shape consistency and provide output shape.</p>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">typedef</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">TShape</span> <span class="p">(</span><span class="o">*</span><span class="n">UnaryShapeFunction</span><span class="p">)(</span><span class="k">const</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">TShape</span><span class="o">&amp;</span> <span class="n">src</span><span class="p">,</span>
                                          <span class="k">const</span> <span class="n">EnvArguments</span><span class="o">&amp;</span> <span class="n">env</span><span class="p">);</span>
     <span class="k">typedef</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">TShape</span> <span class="p">(</span><span class="o">*</span><span class="n">BinaryShapeFunction</span><span class="p">)(</span><span class="k">const</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">TShape</span><span class="o">&amp;</span> <span class="n">lhs</span><span class="p">,</span>
                                           <span class="k">const</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">TShape</span><span class="o">&amp;</span> <span class="n">rhs</span><span class="p">,</span>
                                           <span class="k">const</span> <span class="n">EnvArguments</span><span class="o">&amp;</span> <span class="n">env</span><span class="p">);</span>
-</code></pre></div>
-<p>You can use <code>mshadow::TShape</code> to check input data shape and designate output data shape.
-If you don&#39;t define this function, the default output shape is the same as the input shape.
-In the case of a binary operator, the shape of <code>lhs</code> and <code>rhs</code> is checked as the same by default.</p>
+</code></pre></div></div>
+<p>You can use <code class="highlighter-rouge">mshadow::TShape</code> to check input data shape and designate output data shape.
+If you don’t define this function, the default output shape is the same as the input shape.
+In the case of a binary operator, the shape of <code class="highlighter-rouge">lhs</code> and <code class="highlighter-rouge">rhs</code> is checked as the same by default.</p>
 
 <p>You can also use shape functions to check if any additional arguments and resources are present.
-Refer to the additional usages of <code>EnvArguments</code> to accomplish this.</p>
+Refer to the additional usages of <code class="highlighter-rouge">EnvArguments</code> to accomplish this.</p>
 
-<p>Before we start on our smooth l1 loss example, we define a <code>XPU</code> to <code>cpu</code> or <code>gpu</code> in the header
-<code>smooth_l1_unary-inl.h</code> implementation so that we reuse the same code in <code>smooth_l1_unary.cc</code> and
-<code>smooth_l1_unary.cu</code>.</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="cp">#include &lt;mxnet/operator_util.h&gt;
+<p>Before we start on our smooth l1 loss example, we define a <code class="highlighter-rouge">XPU</code> to <code class="highlighter-rouge">cpu</code> or <code class="highlighter-rouge">gpu</code> in the header
+<code class="highlighter-rouge">smooth_l1_unary-inl.h</code> implementation so that we reuse the same code in <code class="highlighter-rouge">smooth_l1_unary.cc</code> and
+<code class="highlighter-rouge">smooth_l1_unary.cu</code>.</p>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="cp">#include &lt;mxnet/operator_util.h&gt;
 </span>    <span class="cp">#if defined(__CUDACC__)
 </span>    <span class="cp">#define XPU gpu
 </span>    <span class="cp">#else
 </span>    <span class="cp">#define XPU cpu
 </span>    <span class="cp">#endif
-</span></code></pre></div>
-<p>In our smooth l1 loss example, it&#39;s okay to use the default behavior whereby the output has the same shape as the source.
+</span></code></pre></div></div>
+<p>In our smooth l1 loss example, it’s okay to use the default behavior whereby the output has the same shape as the source.
 Written explicitly, it is:</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="kr">inline</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">TShape</span> <span class="nf">SmoothL1Shape_</span><span class="p">(</span><span class="k">const</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">TShape</span><span class="o">&amp;</span> <span class="n">src</span><span class="p">,</span>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="kr">inline</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">TShape</span> <span class="nf">SmoothL1Shape_</span><span class="p">(</span><span class="k">const</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">TShape</span><span class="o">&amp;</span> <span class="n">src</span><span class="p">,</span>
                                  <span class="k">const</span> <span class="n">EnvArguments</span><span class="o">&amp;</span> <span class="n">env</span><span class="p">)</span> <span class="p">{</span>
       <span class="k">return</span> <span class="n">mxnet</span><span class="o">::</span><span class="n">TShape</span><span class="p">(</span><span class="n">src</span><span class="p">);</span>
     <span class="p">}</span>
-</code></pre></div>
-<h3 id="define-functions">Define Functions</h3>
+</code></pre></div></div>
 
-<p>Create a unary or binary function with one output: <code>mshadow::TBlob</code>.</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="k">typedef</span> <span class="nf">void</span> <span class="p">(</span><span class="o">*</span><span class="n">UnaryFunction</span><span class="p">)(</span><span class="k">const</span> <span class="n">TBlob</span><span class="o">&amp;</span> <span class="n">src</span><span class="p">,</span>
+<h3 id="define-functions">Define Functions</h3>
+<p>Create a unary or binary function with one output: <code class="highlighter-rouge">mshadow::TBlob</code>.</p>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">typedef</span> <span class="nf">void</span> <span class="p">(</span><span class="o">*</span><span class="n">UnaryFunction</span><span class="p">)(</span><span class="k">const</span> <span class="n">TBlob</span><span class="o">&amp;</span> <span class="n">src</span><span class="p">,</span>
                                   <span class="k">const</span> <span class="n">EnvArguments</span><span class="o">&amp;</span> <span class="n">env</span><span class="p">,</span>
                                   <span class="n">TBlob</span><span class="o">*</span> <span class="n">ret</span><span class="p">,</span>
                                   <span class="n">OpReqType</span> <span class="n">req</span><span class="p">,</span>
@@ -899,30 +1054,35 @@
                                    <span class="n">TBlob</span><span class="o">*</span> <span class="n">ret</span><span class="p">,</span>
                                    <span class="n">OpReqType</span> <span class="n">req</span><span class="p">,</span>
                                    <span class="n">RunContext</span> <span class="n">ctx</span><span class="p">);</span>
-</code></pre></div>
+</code></pre></div></div>
 <ul>
-<li>Functions are differentiated by the types of input arguments.</li>
-<li><code>RunContext ctx</code> contains information needed during runtime for execution.</li>
+  <li>Functions are differentiated by the types of input arguments.</li>
+  <li><code class="highlighter-rouge">RunContext ctx</code> contains information needed during runtime for execution.</li>
 </ul>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">        <span class="k">struct</span> <span class="n">RunContext</span> <span class="p">{</span>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>        <span class="k">struct</span> <span class="n">RunContext</span> <span class="p">{</span>
           <span class="kt">void</span> <span class="o">*</span><span class="n">stream</span><span class="p">;</span>  <span class="c1">// the stream of the device, can be NULL or Stream&lt;gpu&gt;* in GPU mode</span>
           <span class="k">template</span><span class="o">&lt;</span><span class="k">typename</span> <span class="n">xpu</span><span class="o">&gt;</span> <span class="kr">inline</span> <span class="n">mshadow</span><span class="o">::</span><span class="n">Stream</span><span class="o">&lt;</span><span class="n">xpu</span><span class="o">&gt;*</span> <span class="n">get_stream</span><span class="p">()</span> <span class="c1">// get mshadow stream from Context</span>
         <span class="p">}</span>  <span class="c1">// namespace mxnet</span>
-</code></pre></div>
-<p><code>mshadow::stream&lt;xpu&gt; *s = ctx.get_stream&lt;xpu&gt;();</code> is an example of obtaining a stream from <code>ctx</code>.
-* <code>OpReqType req</code> denotes how computation results are written into <code>ret</code>.</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">        <span class="k">enum</span> <span class="n">OpReqType</span> <span class="p">{</span>
+</code></pre></div></div>
+<p><code class="highlighter-rouge">mshadow::stream&lt;xpu&gt; *s = ctx.get_stream&lt;xpu&gt;();</code> is an example of obtaining a stream from <code class="highlighter-rouge">ctx</code>.</p>
+<ul>
+  <li><code class="highlighter-rouge">OpReqType req</code> denotes how computation results are written into <code class="highlighter-rouge">ret</code>.</li>
+</ul>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>        <span class="k">enum</span> <span class="n">OpReqType</span> <span class="p">{</span>
           <span class="n">kNullOp</span><span class="p">,</span>  <span class="c1">// no operation, do not write anything</span>
           <span class="n">kWriteTo</span><span class="p">,</span>  <span class="c1">// write gradient to provided space</span>
           <span class="n">kWriteInplace</span><span class="p">,</span>  <span class="c1">// perform an in-place write</span>
           <span class="n">kAddTo</span>  <span class="c1">// add to the provided space</span>
         <span class="p">};</span>
-</code></pre></div>
-<p>A macro is defined in <code>operator_util.h</code> for a simplified use of <code>OpReqType</code>.
-  <code>ASSIGN_DISPATCH(out, req, exp)</code> checks <code>req</code> and performs an assignment.</p>
+</code></pre></div></div>
+<p>A macro is defined in <code class="highlighter-rouge">operator_util.h</code> for a simplified use of <code class="highlighter-rouge">OpReqType</code>.
+  <code class="highlighter-rouge">ASSIGN_DISPATCH(out, req, exp)</code> checks <code class="highlighter-rouge">req</code> and performs an assignment.</p>
 
-<p>In our smooth l1 loss example, we use <code>UnaryFunction</code> to define the function of this operator.</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="k">template</span><span class="o">&lt;</span><span class="k">typename</span> <span class="n">xpu</span><span class="o">&gt;</span>
+<p>In our smooth l1 loss example, we use <code class="highlighter-rouge">UnaryFunction</code> to define the function of this operator.</p>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">template</span><span class="o">&lt;</span><span class="k">typename</span> <span class="n">xpu</span><span class="o">&gt;</span>
     <span class="kt">void</span> <span class="nf">SmoothL1Forward_</span><span class="p">(</span><span class="k">const</span> <span class="n">TBlob</span><span class="o">&amp;</span> <span class="n">src</span><span class="p">,</span>
                           <span class="k">const</span> <span class="n">EnvArguments</span><span class="o">&amp;</span> <span class="n">env</span><span class="p">,</span>
                           <span class="n">TBlob</span> <span class="o">*</span><span class="n">ret</span><span class="p">,</span>
@@ -939,16 +1099,16 @@
                         <span class="n">F</span><span class="o">&lt;</span><span class="n">mshadow_op</span><span class="o">::</span><span class="n">smooth_l1_loss</span><span class="o">&gt;</span><span class="p">(</span><span class="n">in</span><span class="p">,</span> <span class="n">ScalarExp</span><span class="o">&lt;</span><span class="n">DType</span><span class="o">&gt;</span><span class="p">(</span><span class="n">sigma2</span><span class="p">)));</span>
       <span class="p">});</span>
     <span class="p">}</span>
-</code></pre></div>
-<p>After obtaining <code>mshadow::Stream</code> from <code>RunContext</code>, we get <code>mshadow::Tensor</code> from <code>mshadow::TBlob</code>.
-<code>mshadow::F</code> is a shortcut to initiate a <code>mshadow</code> expression. The macro <code>MSHADOW_TYPE_SWITCH(type, DType, ...)</code>
-handles details on different types, and the macro <code>ASSIGN_DISPATCH(out, req, exp)</code> checks <code>OpReqType</code> and
-performs actions accordingly. <code>sigma2</code> is a special parameter in this loss, which we will cover later.</p>
+</code></pre></div></div>
+<p>After obtaining <code class="highlighter-rouge">mshadow::Stream</code> from <code class="highlighter-rouge">RunContext</code>, we get <code class="highlighter-rouge">mshadow::Tensor</code> from <code class="highlighter-rouge">mshadow::TBlob</code>.
+<code class="highlighter-rouge">mshadow::F</code> is a shortcut to initiate a <code class="highlighter-rouge">mshadow</code> expression. The macro <code class="highlighter-rouge">MSHADOW_TYPE_SWITCH(type, DType, ...)</code>
+handles details on different types, and the macro <code class="highlighter-rouge">ASSIGN_DISPATCH(out, req, exp)</code> checks <code class="highlighter-rouge">OpReqType</code> and
+performs actions accordingly. <code class="highlighter-rouge">sigma2</code> is a special parameter in this loss, which we will cover later.</p>
 
 <h3 id="define-gradients-optional">Define Gradients (Optional)</h3>
-
 <p>Create a gradient function with various types of inputs.</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="c1">// depending only on out_grad</span>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="c1">// depending only on out_grad</span>
     <span class="k">typedef</span> <span class="nf">void</span> <span class="p">(</span><span class="o">*</span><span class="n">UnaryGradFunctionT0</span><span class="p">)(</span><span class="k">const</span> <span class="n">OutputGrad</span><span class="o">&amp;</span> <span class="n">out_grad</span><span class="p">,</span>
                                         <span class="k">const</span> <span class="n">EnvArguments</span><span class="o">&amp;</span> <span class="n">env</span><span class="p">,</span>
                                         <span class="n">TBlob</span><span class="o">*</span> <span class="n">in_grad</span><span class="p">,</span>
@@ -968,24 +1128,27 @@
                                         <span class="n">TBlob</span><span class="o">*</span> <span class="n">in_grad</span><span class="p">,</span>
                                         <span class="n">OpReqType</span> <span class="n">req</span><span class="p">,</span>
                                         <span class="n">RunContext</span> <span class="n">ctx</span><span class="p">);</span>
-</code></pre></div>
-<p>Gradient functions of binary operators have similar structures, except that <code>Input</code>, <code>TBlob</code>, and <code>OpReqType</code>
+</code></pre></div></div>
+<p>Gradient functions of binary operators have similar structures, except that <code class="highlighter-rouge">Input</code>, <code class="highlighter-rouge">TBlob</code>, and <code class="highlighter-rouge">OpReqType</code>
 are doubled.</p>
 
-<p><code>GradFunctionArgument</code></p>
+<p><code class="highlighter-rouge">GradFunctionArgument</code></p>
 
-<p><code>Input0</code>, <code>Input</code>, <code>OutputValue</code>, and <code>OutputGrad</code> all share the structure of <code>GradFunctionArgument</code>,
+<p><code class="highlighter-rouge">Input0</code>, <code class="highlighter-rouge">Input</code>, <code class="highlighter-rouge">OutputValue</code>, and <code class="highlighter-rouge">OutputGrad</code> all share the structure of <code class="highlighter-rouge">GradFunctionArgument</code>,
   which is defined as:</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">      <span class="k">struct</span> <span class="n">GradFunctionArgument</span> <span class="p">{</span>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>      <span class="k">struct</span> <span class="n">GradFunctionArgument</span> <span class="p">{</span>
           <span class="n">TBlob</span> <span class="n">data</span><span class="p">;</span>
       <span class="p">}</span>
-</code></pre></div>
-<p>In our smooth l1 loss example, note that it&#39;s an <code>f&#39;(x)</code>,
+</code></pre></div></div>
+
+<p>In our smooth l1 loss example, note that it’s an <code class="highlighter-rouge">f'(x)</code>,
 which utilizes input for the gradient calculation,
-so the <code>UnaryGradFunctionT2</code> is suitable.
+so the <code class="highlighter-rouge">UnaryGradFunctionT2</code> is suitable.
 To enable the chain rule of the gradient,
-we also need to multiply <code>out_grad</code> from the top to the result of <code>in_grad</code>.</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="k">template</span><span class="o">&lt;</span><span class="k">typename</span> <span class="n">xpu</span><span class="o">&gt;</span>
+we also need to multiply <code class="highlighter-rouge">out_grad</code> from the top to the result of <code class="highlighter-rouge">in_grad</code>.</p>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">template</span><span class="o">&lt;</span><span class="k">typename</span> <span class="n">xpu</span><span class="o">&gt;</span>
     <span class="kt">void</span> <span class="nf">SmoothL1BackwardUseIn_</span><span class="p">(</span><span class="k">const</span> <span class="n">OutputGrad</span><span class="o">&amp;</span> <span class="n">out_grad</span><span class="p">,</span>
                                 <span class="k">const</span> <span class="n">Input0</span><span class="o">&amp;</span> <span class="n">in_data0</span><span class="p">,</span>
                                 <span class="k">const</span> <span class="n">EnvArguments</span><span class="o">&amp;</span> <span class="n">env</span><span class="p">,</span>
@@ -1004,103 +1167,108 @@
                         <span class="n">ograd</span> <span class="o">*</span> <span class="n">F</span><span class="o">&lt;</span><span class="n">mshadow_op</span><span class="o">::</span><span class="n">smooth_l1_gradient</span><span class="o">&gt;</span><span class="p">(</span><span class="n">src</span><span class="p">,</span> <span class="n">ScalarExp</span><span class="o">&lt;</span><span class="n">DType</span><span class="o">&gt;</span><span class="p">(</span><span class="n">sigma2</span><span class="p">)));</span>
       <span class="p">});</span>
     <span class="p">}</span>
-</code></pre></div>
-<h3 id="register-simpleop-to-mxnet">Register SimpleOp to MXNet</h3>
+</code></pre></div></div>
 
+<h3 id="register-simpleop-to-mxnet">Register SimpleOp to MXNet</h3>
 <p>After creating the shape, function, and gradient, restore them into both an NDArray operator and
-a symbolic operator. To simplify this process, use the registration macro defined in <code>operator_util.h</code>.</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="n">MXNET_REGISTER_SIMPLE_OP</span><span class="p">(</span><span class="n">Name</span><span class="p">,</span> <span class="n">DEV</span><span class="p">)</span>
+a symbolic operator. To simplify this process, use the registration macro defined in <code class="highlighter-rouge">operator_util.h</code>.</p>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="n">MXNET_REGISTER_SIMPLE_OP</span><span class="p">(</span><span class="n">Name</span><span class="p">,</span> <span class="n">DEV</span><span class="p">)</span>
     <span class="p">.</span><span class="n">set_shape_function</span><span class="p">(</span><span class="n">Shape</span><span class="p">)</span>
     <span class="p">.</span><span class="n">set_function</span><span class="p">(</span><span class="n">DEV</span><span class="o">::</span><span class="n">kDevMask</span><span class="p">,</span> <span class="n">Function</span><span class="o">&lt;</span><span class="n">XPU</span><span class="o">&gt;</span><span class="p">,</span> <span class="n">SimpleOpInplaceOption</span><span class="p">)</span>
     <span class="p">.</span><span class="n">set_gradient</span><span class="p">(</span><span class="n">DEV</span><span class="o">::</span><span class="n">kDevMask</span><span class="p">,</span> <span class="n">Gradient</span><span class="o">&lt;</span><span class="n">XPU</span><span class="o">&gt;</span><span class="p">,</span> <span class="n">SimpleOpInplaceOption</span><span class="p">)</span>
     <span class="p">.</span><span class="n">describe</span><span class="p">(</span><span class="s">"description"</span><span class="p">);</span>
-</code></pre></div>
-<p><code>SimpleOpInplaceOption</code> is defined as:</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="k">enum</span> <span class="n">SimpleOpInplaceOption</span> <span class="p">{</span>
+</code></pre></div></div>
+<p><code class="highlighter-rouge">SimpleOpInplaceOption</code> is defined as:</p>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">enum</span> <span class="n">SimpleOpInplaceOption</span> <span class="p">{</span>
       <span class="n">kNoInplace</span><span class="p">,</span>  <span class="c1">// do not allow inplace in arguments</span>
       <span class="n">kInplaceInOut</span><span class="p">,</span>  <span class="c1">// allow inplace in with out (unary)</span>
       <span class="n">kInplaceOutIn</span><span class="p">,</span>  <span class="c1">// allow inplace out_grad with in_grad (unary)</span>
       <span class="n">kInplaceLhsOut</span><span class="p">,</span>  <span class="c1">// allow inplace left operand with out (binary)</span>
       <span class="n">kInplaceOutLhs</span>  <span class="c1">// allow inplace out_grad with lhs_grad (binary)</span>
     <span class="p">};</span>
-</code></pre></div>
-<p>In our example, we have a gradient function that relies on input data, so the function can&#39;t be written in
+</code></pre></div></div>
+
+<p>In our example, we have a gradient function that relies on input data, so the function can’t be written in
 place. The output gradient has no purpose after gradient computation, so the gradient can be written in place.</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="n">MXNET_REGISTER_SIMPLE_OP</span><span class="p">(</span><span class="n">smooth_l1</span><span class="p">,</span> <span class="n">XPU</span><span class="p">)</span>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="n">MXNET_REGISTER_SIMPLE_OP</span><span class="p">(</span><span class="n">smooth_l1</span><span class="p">,</span> <span class="n">XPU</span><span class="p">)</span>
     <span class="p">.</span><span class="n">set_function</span><span class="p">(</span><span class="n">XPU</span><span class="o">::</span><span class="n">kDevMask</span><span class="p">,</span> <span class="n">SmoothL1Forward_</span><span class="o">&lt;</span><span class="n">XPU</span><span class="o">&gt;</span><span class="p">,</span> <span class="n">kNoInplace</span><span class="p">)</span>
     <span class="p">.</span><span class="n">set_gradient</span><span class="p">(</span><span class="n">XPU</span><span class="o">::</span><span class="n">kDevMask</span><span class="p">,</span> <span class="n">SmoothL1BackwardUseIn_</span><span class="o">&lt;</span><span class="n">XPU</span><span class="o">&gt;</span><span class="p">,</span> <span class="n">kInplaceOutIn</span><span class="p">)</span>
     <span class="p">.</span><span class="n">set_enable_scalar</span><span class="p">(</span><span class="nb">true</span><span class="p">)</span>
     <span class="p">.</span><span class="n">describe</span><span class="p">(</span><span class="s">"Calculate Smooth L1 Loss(lhs, scalar)"</span><span class="p">);</span>
-</code></pre></div>
-<p>Remember from the discussion of shape functions that a default behavior without <code>set_shape_function</code> forces the inputs
-(if they&#39;re binary) to be the same shape and yield the same shape for output. We&#39;ll discuss <code>set_enable_scalar</code> later.</p>
+</code></pre></div></div>
+<p>Remember from the discussion of shape functions that a default behavior without <code class="highlighter-rouge">set_shape_function</code> forces the inputs
+(if they’re binary) to be the same shape and yield the same shape for output. We’ll discuss <code class="highlighter-rouge">set_enable_scalar</code> later.</p>
 
 <h3 id="ndarray-operator-summary">NDArray Operator Summary</h3>
-
 <ul>
-<li>Create a shape function for determining the output shape.</li>
-<li>Create a function as the forward routine by choosing a suitable function type.</li>
-<li>Create a gradient as the backward routine by choosing a suitable gradient type.</li>
-<li>Register the operator using the registration process.</li>
+  <li>Create a shape function for determining the output shape.</li>
+  <li>Create a function as the forward routine by choosing a suitable function type.</li>
+  <li>Create a gradient as the backward routine by choosing a suitable gradient type.</li>
+  <li>Register the operator using the registration process.</li>
 </ul>
 
 <h2 id="additional-information-on-simpleop">Additional Information on SimpleOp</h2>
-
 <h3 id="using-simpleop-on-envarguments">Using SimpleOp on EnvArguments</h3>
-
 <p>Some operations might need a scalar as input, such as a  gradient scale, a set of keyword arguments
-controlling behavior, or a temporary space to speed up calculations.<code>EnvArguments</code> provides additional arguments and resources to make calculations more scalable
+controlling behavior, or a temporary space to speed up calculations.<code class="highlighter-rouge">EnvArguments</code> provides additional arguments and resources to make calculations more scalable
 and efficient.</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="k">struct</span> <span class="n">EnvArguments</span> <span class="p">{</span>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">struct</span> <span class="n">EnvArguments</span> <span class="p">{</span>
       <span class="n">real_t</span> <span class="n">scalar</span><span class="p">;</span>  <span class="c1">// scalar argument, if enabled</span>
       <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">std</span><span class="o">::</span><span class="n">pair</span><span class="o">&lt;</span><span class="n">std</span><span class="o">::</span><span class="n">string</span><span class="p">,</span> <span class="n">std</span><span class="o">::</span><span class="n">string</span><span class="o">&gt;</span> <span class="o">&gt;</span> <span class="n">kwargs</span><span class="p">;</span>  <span class="c1">// keyword arguments</span>
       <span class="n">std</span><span class="o">::</span><span class="n">vector</span><span class="o">&lt;</span><span class="n">Resource</span><span class="o">&gt;</span> <span class="n">resource</span><span class="p">;</span>  <span class="c1">// pointer to the resources requested</span>
     <span class="p">};</span>
-</code></pre></div>
-<p>More registration parameters are required to enable these additional features. To prevent confusion on parameters, <code>scalar</code> and <code>kwargs</code>
-can&#39;t be present at the same time. To enable <code>scalar</code>, use
-<code>set_enable_scalar(bool enable_scalar)</code> in registration. Then, in forward functions and gradients, the <code>scalar</code> can be accessed from <code>env.scalar</code> as in the function parameter <code>EnvArguments env</code>.</p>
+</code></pre></div></div>
 
-<p>To enable <code>kwargs</code>, use <code>set_enable_kwargs(bool enable_kwargs)</code> in registration. Then, in forward
-functions and gradients, additional arguments are contained in <code>env.kwarg</code>, which is defined as
-<code>std::vector&lt;std::pair&lt;std::string, std::string&gt; &gt;</code>. Use the DMLC parameter structure to
+<p>More registration parameters are required to enable these additional features. To prevent confusion on parameters, <code class="highlighter-rouge">scalar</code> and <code class="highlighter-rouge">kwargs</code>
+can’t be present at the same time. To enable <code class="highlighter-rouge">scalar</code>, use
+<code class="highlighter-rouge">set_enable_scalar(bool enable_scalar)</code> in registration. Then, in forward functions and gradients, the <code class="highlighter-rouge">scalar</code> can be accessed from <code class="highlighter-rouge">env.scalar</code> as in the function parameter <code class="highlighter-rouge">EnvArguments env</code>.</p>
+
+<p>To enable <code class="highlighter-rouge">kwargs</code>, use <code class="highlighter-rouge">set_enable_kwargs(bool enable_kwargs)</code> in registration. Then, in forward
+functions and gradients, additional arguments are contained in <code class="highlighter-rouge">env.kwarg</code>, which is defined as
+<code class="highlighter-rouge">std::vector&lt;std::pair&lt;std::string, std::string&gt; &gt;</code>. Use the DMLC parameter structure to
 simplify parsing keyword arguments. For more details, see the <a href="https://github.com/dmlc/dmlc-core/blob/master/doc/parameter.md">guide on parameter structure</a>.</p>
 
-<p>Additional resources like <code>mshadow::Random&lt;xpu&gt;</code> and temporary memory space can also be requested and
-accessed from <code>EnvArguments.resource</code>. The registration routine is <code>set_resource_request(ResourceRequest req)</code>
-or <code>set_resource_request(const std::vector&lt;ResourceRequest&gt;)</code>, where <code>mxnet::ResourceRequest</code> is defined as:</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="k">struct</span> <span class="n">ResourceRequest</span> <span class="p">{</span>
+<p>Additional resources like <code class="highlighter-rouge">mshadow::Random&lt;xpu&gt;</code> and temporary memory space can also be requested and
+accessed from <code class="highlighter-rouge">EnvArguments.resource</code>. The registration routine is <code class="highlighter-rouge">set_resource_request(ResourceRequest req)</code>
+or <code class="highlighter-rouge">set_resource_request(const std::vector&lt;ResourceRequest&gt;)</code>, where <code class="highlighter-rouge">mxnet::ResourceRequest</code> is defined as:</p>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">struct</span> <span class="n">ResourceRequest</span> <span class="p">{</span>
       <span class="k">enum</span> <span class="n">Type</span> <span class="p">{</span>  <span class="c1">// Resource type, indicating what the pointer type is</span>
         <span class="n">kRandom</span><span class="p">,</span>  <span class="c1">// mshadow::Random&lt;xpu&gt; object</span>
         <span class="n">kTempSpace</span>  <span class="c1">// A dynamic temp space that can be arbitrary size</span>
       <span class="p">};</span>
       <span class="n">Type</span> <span class="n">type</span><span class="p">;</span>  <span class="c1">// type of resources</span>
     <span class="p">};</span>
-</code></pre></div>
-<p>Registration will request the declared resource requests from <code>mxnet::ResourceManager</code>, and place resources
-in <code>std::vector&lt;Resource&gt; resource</code> in <code>EnvArguments</code>. To access resources, use the following:</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="k">auto</span> <span class="n">tmp_space_res</span> <span class="o">=</span> <span class="n">env</span><span class="p">.</span><span class="n">resources</span><span class="p">[</span><span class="mi">0</span><span class="p">].</span><span class="n">get_space</span><span class="p">(</span><span class="n">some_shape</span><span class="p">,</span> <span class="n">some_stream</span><span class="p">);</span>
+</code></pre></div></div>
+<p>Registration will request the declared resource requests from <code class="highlighter-rouge">mxnet::ResourceManager</code>, and place resources
+in <code class="highlighter-rouge">std::vector&lt;Resource&gt; resource</code> in <code class="highlighter-rouge">EnvArguments</code>. To access resources, use the following:</p>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">auto</span> <span class="n">tmp_space_res</span> <span class="o">=</span> <span class="n">env</span><span class="p">.</span><span class="n">resources</span><span class="p">[</span><span class="mi">0</span><span class="p">].</span><span class="n">get_space</span><span class="p">(</span><span class="n">some_shape</span><span class="p">,</span> <span class="n">some_stream</span><span class="p">);</span>
     <span class="k">auto</span> <span class="n">rand_res</span> <span class="o">=</span> <span class="n">env</span><span class="p">.</span><span class="n">resources</span><span class="p">[</span><span class="mi">0</span><span class="p">].</span><span class="n">get_random</span><span class="p">(</span><span class="n">some_stream</span><span class="p">);</span>
-</code></pre></div>
-<p>For an example, see <code>src/operator/loss_binary_op-inl.h</code>.</p>
+</code></pre></div></div>
+<p>For an example, see <code class="highlighter-rouge">src/operator/loss_binary_op-inl.h</code>.</p>
 
 <p>In our smooth l1 loss example, a scalar input is needed to mark the turning point of a loss function. Therefore,
-in the registration process, we use <code>set_enable_scalar(true)</code>, and use <code>env.scalar</code> in function and gradient
+in the registration process, we use <code class="highlighter-rouge">set_enable_scalar(true)</code>, and use <code class="highlighter-rouge">env.scalar</code> in function and gradient
 declarations.</p>
 
 <h3 id="crafting-a-tensor-operation">Crafting a Tensor Operation</h3>
-
-<p>Because computation utilizes the <code>mshadow</code> library and we sometimes don&#39;t have functions readily available, we
+<p>Because computation utilizes the <code class="highlighter-rouge">mshadow</code> library and we sometimes don’t have functions readily available, we
 can craft tensor operations in operator implementations. If you define such functions as element-wise, you
-can implement them as a <code>mxnet::op::mshadow_op</code>. <code>src/operator/mshadow_op.h</code> that contains a lot of <code>mshadow_op</code>,
-for example. <code>mshadow_op</code> are expression mappers. They deal with the scalar case of desired functions. For details, see
+can implement them as a <code class="highlighter-rouge">mxnet::op::mshadow_op</code>. <code class="highlighter-rouge">src/operator/mshadow_op.h</code> that contains a lot of <code class="highlighter-rouge">mshadow_op</code>,
+for example. <code class="highlighter-rouge">mshadow_op</code> are expression mappers. They deal with the scalar case of desired functions. For details, see
 <a href="https://github.com/dmlc/mshadow/tree/master/doc">mshadow expression API guide</a>.</p>
 
-<p>If an operation can&#39;t be done in an element-wise way, like the softmax loss and gradient, then you need to create a new tensor operation. You need to create as <code>mshadow</code> function and as <code>mshadow::cuda</code>
-function directly. For details, see the <code>mshadow</code> library. For an example, see <code>src/operator/roi_pooling.cc</code>.</p>
+<p>If an operation can’t be done in an element-wise way, like the softmax loss and gradient, then you need to create a new tensor operation. You need to create as <code class="highlighter-rouge">mshadow</code> function and as <code class="highlighter-rouge">mshadow::cuda</code>
+function directly. For details, see the <code class="highlighter-rouge">mshadow</code> library. For an example, see <code class="highlighter-rouge">src/operator/roi_pooling.cc</code>.</p>
 
 <p>In our smooth l1 loss example, we create two mappers, namely the scalar cases of smooth l1 loss and gradient.</p>
-<div class="highlight"><pre><code class="language-cpp" data-lang="cpp">    <span class="k">namespace</span> <span class="n">mshadow_op</span> <span class="p">{</span>
+
+<div class="language-cpp highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">namespace</span> <span class="n">mshadow_op</span> <span class="p">{</span>
     <span class="k">struct</span> <span class="n">smooth_l1_loss</span> <span class="p">{</span>
       <span class="c1">// a is x, b is sigma2</span>
       <span class="n">MSHADOW_XINLINE</span> <span class="k">static</span> <span class="n">real_t</span> <span class="n">Map</span><span class="p">(</span><span class="n">real_t</span> <span class="n">a</span><span class="p">,</span> <span class="n">real_t</span> <span class="n">b</span><span class="p">)</span> <span class="p">{</span>
@@ -1114,11 +1282,10 @@
       <span class="p">}</span>
     <span class="p">};</span>
     <span class="p">}</span>
-</code></pre></div>
-<p>The gradient, which can be found in <code>src/operator/smooth_l1_unary-inl.h</code>, is similar.</p>
+</code></pre></div></div>
+<p>The gradient, which can be found in <code class="highlighter-rouge">src/operator/smooth_l1_unary-inl.h</code>, is similar.</p>
 
 <h3 id="beyond-two-operands">Beyond Two Operands</h3>
-
 <p>The new unified API is designed to fulfill the fundamentals of an operation. For operators with more than two inputs,
 more than one output, or that need more features, see the original <a href="overview#operators-in-mxnet">Operator API</a>.</p>
 
@@ -1136,8 +1303,7 @@
             <div class="col-4">
                 <h4 class="footer-category-title">Resources</h4>
                 <ul class="contact-list">
-                    <li><a class="u-email" href="mailto:dev@mxnet.apache.org">Dev list</a></li>
-                    <li><a class="u-email" href="mailto:user@mxnet.apache.org">User mailing list</a></li>
+                    <li><a href="/community/contribute#mxnet-dev-communications">Mailing lists</a></li>
                     <li><a href="https://cwiki.apache.org/confluence/display/MXNET/Apache+MXNet+Home">Developer Wiki</a></li>
                     <li><a href="https://issues.apache.org/jira/projects/MXNET/issues">Jira Tracker</a></li>
                     <li><a href="https://github.com/apache/incubator-mxnet/labels/Roadmap">Github Roadmap</a></li>
diff --git a/api/architecture/program_model.html b/api/architecture/program_model.html
index 03dcf7d..9cd9857 100644
--- a/api/architecture/program_model.html
+++ b/api/architecture/program_model.html
@@ -5,23 +5,23 @@
   <meta name="viewport" content="width=device-width, initial-scale=1">
   <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"><!-- Begin Jekyll SEO tag v2.6.1 -->
 <title>Deep Learning Programming Paradigm | Apache MXNet</title>
-<meta name="generator" content="Jekyll v3.8.6" />
+<meta name="generator" content="Jekyll v4.0.0" />
 <meta property="og:title" content="Deep Learning Programming Paradigm" />
 <meta property="og:locale" content="en_US" />
 <meta name="description" content="A flexible and efficient library for deep learning." />
 <meta property="og:description" content="A flexible and efficient library for deep learning." />
-<link rel="canonical" href="https://mxnet.incubator.apache.org/api/architecture/program_model" />
-<meta property="og:url" content="https://mxnet.incubator.apache.org/api/architecture/program_model" />
+<link rel="canonical" href="https://mxnet-beta.staged.apache.org/api/architecture/program_model" />
+<meta property="og:url" content="https://mxnet-beta.staged.apache.org/api/architecture/program_model" />
 <meta property="og:site_name" content="Apache MXNet" />
 <script type="application/ld+json">
-{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Deep Learning Programming Paradigm","url":"https://mxnet.incubator.apache.org/api/architecture/program_model","@context":"https://schema.org"}</script>
+{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Deep Learning Programming Paradigm","url":"https://mxnet-beta.staged.apache.org/api/architecture/program_model","@context":"https://schema.org"}</script>
 <!-- End Jekyll SEO tag -->
 <script src="https://medium-widget.pixelpoint.io/widget.js"></script>
-  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet.incubator.apache.org/feed.xml" title="Apache MXNet" /><script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
-
-  <script src="/assets/js/clipboard.js"></script>
-  <script src="/assets/js/copycode.js"></script>
-</head>
+  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.css" />
+  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet-beta.staged.apache.org/feed.xml" title="Apache MXNet" /><script src="/assets/js/jquery-3.3.1.min.js"></script><script src="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.js" defer></script>
+  <script src="/assets/js/globalSearch.js" defer></script>
+  <script src="/assets/js/clipboard.js" defer></script>
+  <script src="/assets/js/copycode.js" defer></script></head>
 <body><header class="site-header" role="banner">
 
   <script>
@@ -59,14 +59,173 @@
             </svg>
           </span>
       </label>
-
+      <div class="gs-search-border">
+        <div id="gs-search-icon"></div>
+        <form id="global-search-form">
+          <input id="global-search" type="text" title="Search" placeholder="Search" />
+          <div id="global-search-dropdown-container">
+            <button class="gs-current-version btn" type="button" data-toggle="dropdown">
+                <span id="gs-current-version-label">master</span>
+                <svg class="gs-dropdown-caret icon icon-caret-bottom" viewBox="0 0 32 32" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+          <span id="global-search-close">x</span>
+        </form>
+      </div>
       <div class="trigger">
+        <div id="global-search-mobile-border">
+          <div id="gs-search-icon-mobile"></div>
+          <input id="global-search-mobile" placeholder="Search..." type="text"/>
+          <div id="global-search-dropdown-container-mobile">
+            <button class="gs-current-version-mobile btn" type="button" data-toggle="dropdown">
+                <svg class="gs-dropdown-caret icon icon-caret-bottom" viewBox="0 0 32 32" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown-mobile">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+        </div>
         <a class="page-link" href="/get_started">Get Started</a>
         <a class="page-link" href="/blog">Blog</a>
         <a class="page-link" href="/features">Features</a>
         <a class="page-link" href="/ecosystem">Ecosystem</a>
         <a class="page-link" href="/api">Docs & Tutorials</a>
         <a class="page-link" href="https://github.com/apache/incubator-mxnet">GitHub</a>
+        <div class="dropdown">
+          <span class="dropdown-header">master
+            <svg class="dropdown-caret icon icon-caret-bottom" viewBox="0 0 32 32" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
+          </span>
+          <div class="dropdown-content">
+            
+              
+                <a class="dropdown-option-active" href="/">master</a>
+              
+            
+              
+                <a href="/versions/1.6/">1.6</a>
+              
+            
+              
+                <a href="/versions/1.5.0/">1.5.0</a>
+              
+            
+              
+                <a href="/versions/1.4.1/">1.4.1</a>
+              
+            
+              
+                <a href="/versions/1.3.1/">1.3.1</a>
+              
+            
+              
+                <a href="/versions/1.2.1/">1.2.1</a>
+              
+            
+              
+                <a href="/versions/1.1.0/">1.1.0</a>
+              
+            
+              
+                <a href="/versions/1.0.0/">1.0.0</a>
+              
+            
+              
+                <a href="/versions/0.12.1/">0.12.1</a>
+              
+            
+              
+                <a href="/versions/0.11.0/">0.11.0</a>
+              
+            
+          </div>
+        </div>
       </div>
     </nav>
   </div>
@@ -128,8 +287,6 @@
             
               <!-- page-category -->
             
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/exception_handling">Exception Handling in MXNet</a></li>
               <!-- page-category -->
@@ -210,22 +367,6 @@
             
               <!-- page-category -->
             
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/note_data_loading">Efficient Data Loaders</a></li>
               <!-- page-category -->
@@ -238,14 +379,14 @@
             <li><a href="/api/architecture/note_memory">Memory Consumption</a></li>
               <!-- page-category -->
             
-              <!-- page-category -->
-            
             
             <li><a href="/api/architecture/overview">MXNet System Architecture</a></li>
               <!-- page-category -->
             
               <!-- page-category -->
             
+              <!-- page-category -->
+            
             
             <li><a href="/api/architecture/program_model">Deep Learning Programming Paradigm</a></li>
               <!-- page-category -->
@@ -281,42 +422,25 @@
               <!-- page-category -->
             
               <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
-            
-              <!-- page-category -->
                <!-- resource-p -->
         </ul>
     </div>
     <div class="col-9">
         <!--- Licensed to the Apache Software Foundation (ASF) under one -->
-
 <!--- or more contributor license agreements.  See the NOTICE file -->
-
 <!--- distributed with this work for additional information -->
-
 <!--- regarding copyright ownership.  The ASF licenses this file -->
-
 <!--- to you under the Apache License, Version 2.0 (the -->
-
 <!--- "License"); you may not use this file except in compliance -->
-
 <!--- with the License.  You may obtain a copy of the License at -->
 
 <!---   http://www.apache.org/licenses/LICENSE-2.0 -->
 
 <!--- Unless required by applicable law or agreed to in writing, -->
-
 <!--- software distributed under the License is distributed on an -->
-
 <!--- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -->
-
 <!--- KIND, either express or implied.  See the License for the -->
-
 <!--- specific language governing permissions and limitations -->
-
 <!--- under the License. -->
 
 <h1 id="deep-learning-programming-paradigm">Deep Learning Programming Paradigm</h1>
@@ -328,26 +452,29 @@
 Complicating matters, of the many deep learning libraries out there,
 each has its own approach to programming style.</p>
 
-<p>In this document, we focus on two of the most important high-level design decisions:
-1. Whether to embrace the <em>symbolic</em> or <em>imperative</em> paradigm for mathematical computation.
-2. Whether to build networks with bigger (more abstract) or more atomic operations.</p>
+<p>In this document, we focus on two of the most important high-level design decisions:</p>
+<ol>
+  <li>Whether to embrace the <em>symbolic</em> or <em>imperative</em> paradigm for mathematical computation.</li>
+  <li>Whether to build networks with bigger (more abstract) or more atomic operations.</li>
+</ol>
 
-<p>Throughout, we&#39;ll focus on the programming models themselves.
+<p>Throughout, we’ll focus on the programming models themselves.
 When programming style decisions may impact performance, we point this out,
-but we don&#39;t dwell on specific implementation details.</p>
+but we don’t dwell on specific implementation details.</p>
 
 <h2 id="symbolic-vs-imperative-programs">Symbolic vs. Imperative Programs</h2>
 
-<p>If you are a Python or C++ programmer, then you&#39;re already familiar with imperative programs.
+<p>If you are a Python or C++ programmer, then you’re already familiar with imperative programs.
 Imperative-style programs perform computation as you run them.
 Most code you write in Python is imperative, as is the following NumPy snippet.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="n">np</span>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="n">np</span>
     <span class="n">a</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">(</span><span class="mi">10</span><span class="p">)</span>
     <span class="n">b</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">(</span><span class="mi">10</span><span class="p">)</span> <span class="o">*</span> <span class="mi">2</span>
     <span class="n">c</span> <span class="o">=</span> <span class="n">b</span> <span class="o">*</span> <span class="n">a</span>
     <span class="n">d</span> <span class="o">=</span> <span class="n">c</span> <span class="o">+</span> <span class="mi">1</span>
-</code></pre></div>
-<p>When the program executes <code>c = b * a</code>, it runs the actual numerical computation.</p>
+</code></pre></div></div>
+<p>When the program executes <code class="highlighter-rouge">c = b * a</code>, it runs the actual numerical computation.</p>
 
 <p>Symbolic programs are a bit different. With symbolic-style programs,
 we first define a (potentially complex) function abstractly.
@@ -356,20 +483,21 @@
 Then we can compile the function, and evaluate it given real inputs.
 In the following example, we rewrite the imperative program from above
 as a symbolic-style program:</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="n">A</span> <span class="o">=</span> <span class="n">Variable</span><span class="p">(</span><span class="s">'A'</span><span class="p">)</span>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="n">A</span> <span class="o">=</span> <span class="n">Variable</span><span class="p">(</span><span class="s">'A'</span><span class="p">)</span>
     <span class="n">B</span> <span class="o">=</span> <span class="n">Variable</span><span class="p">(</span><span class="s">'B'</span><span class="p">)</span>
     <span class="n">C</span> <span class="o">=</span> <span class="n">B</span> <span class="o">*</span> <span class="n">A</span>
     <span class="n">D</span> <span class="o">=</span> <span class="n">C</span> <span class="o">+</span> <span class="n">Constant</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
     <span class="c1"># compiles the function
 </span>    <span class="n">f</span> <span class="o">=</span> <span class="nb">compile</span><span class="p">(</span><span class="n">D</span><span class="p">)</span>
     <span class="n">d</span> <span class="o">=</span> <span class="n">f</span><span class="p">(</span><span class="n">A</span><span class="o">=</span><span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">(</span><span class="mi">10</span><span class="p">),</span> <span class="n">B</span><span class="o">=</span><span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">(</span><span class="mi">10</span><span class="p">)</span><span class="o">*</span><span class="mi">2</span><span class="p">)</span>
-</code></pre></div>
-<p>As you can see, in the symbolic version, when <code>C = B * A</code> is executed, no computation occurs.
+</code></pre></div></div>
+<p>As you can see, in the symbolic version, when <code class="highlighter-rouge">C = B * A</code> is executed, no computation occurs.
 Instead, this operation generates a <em>computation graph</em> (also called a <em>symbolic graph</em>)
 that represents the computation.
-The following figure shows a computation graph to compute <code>D</code>.</p>
+The following figure shows a computation graph to compute <code class="highlighter-rouge">D</code>.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/prog_model/comp_graph.png" alt="Comp Graph"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/prog_model/comp_graph.png" alt="Comp Graph" /></p>
 
 <p>Most symbolic-style programs contain, either explicitly or implicitly, a <em>compile</em> step.
 This converts the computation graph into a function that we can later call.
@@ -381,77 +509,80 @@
 <p>Among other popular deep learning libraries, Torch, Chainer, and Minerva embrace the imperative style.
 Examples of symbolic-style deep learning libraries include Theano, CGT, and TensorFlow.
 We might also view libraries like CXXNet and Caffe, which rely on configuration files, as symbolic-style libraries.
-In this interpretation, we&#39;d consider the content of the configuration file as defining the computation graph.</p>
+In this interpretation, we’d consider the content of the configuration file as defining the computation graph.</p>
 
-<p>Now that you understand the difference between these two programming models, let&#39;s compare the advantages of each.</p>
+<p>Now that you understand the difference between these two programming models, let’s compare the advantages of each.</p>
 
 <h3 id="imperative-programs-tend-to-be-more-flexible">Imperative Programs Tend to be More Flexible</h3>
 
-<p>When you&#39;re using an imperative-style library from Python, you are writing in Python.
+<p>When you’re using an imperative-style library from Python, you are writing in Python.
 Nearly anything that would be intuitive to write in Python, you could accelerate by calling down in the appropriate places to the imperative deep learning library.
 On the other hand, when you write a symbolic program, you may not have access to all the familiar Python constructs, like iteration.
 Consider the following imperative program, and think about how you can translate this into a symbolic program.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="n">a</span> <span class="o">=</span> <span class="mi">2</span>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="n">a</span> <span class="o">=</span> <span class="mi">2</span>
     <span class="n">b</span> <span class="o">=</span> <span class="n">a</span> <span class="o">+</span> <span class="mi">1</span>
     <span class="n">d</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">zeros</span><span class="p">(</span><span class="mi">10</span><span class="p">)</span>
     <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
         <span class="n">d</span> <span class="o">+=</span> <span class="n">np</span><span class="o">.</span><span class="n">zeros</span><span class="p">(</span><span class="mi">10</span><span class="p">)</span>
-</code></pre></div>
-<p>This wouldn&#39;t be so easy if the Python for-loop weren&#39;t supported by the symbolic API.
-When you write a symbolic program in Python, you&#39;re <em>not</em> writing in Python.
-Instead, you&#39;re writing in a domain-specific language (DSL) defined by the symbolic API.
+</code></pre></div></div>
+<p>This wouldn’t be so easy if the Python for-loop weren’t supported by the symbolic API.
+When you write a symbolic program in Python, you’re <em>not</em> writing in Python.
+Instead, you’re writing in a domain-specific language (DSL) defined by the symbolic API.
 The symbolic APIs found in deep learning libraries
 are powerful DSLs that generate callable computation graphs for neural networks.
-&lt;!-- In that sense, config-file input libraries are all symbolic. --&gt;</p>
+<!-- In that sense, config-file input libraries are all symbolic. --></p>
 
 <p>Intuitively, you might say that imperative programs
 are more <em>native</em> than symbolic programs.
-It&#39;s easier to use native language features.
-For example, it&#39;s straightforward to print out the values
+It’s easier to use native language features.
+For example, it’s straightforward to print out the values
 in the middle of computation or to use native control flow and loops
 at any point in the flow of computation.</p>
 
 <h3 id="symbolic-programs-tend-to-be-more-efficient">Symbolic Programs Tend to be More Efficient</h3>
 
-<p>As we&#39;ve seen, imperative programs tend to be flexible
+<p>As we’ve seen, imperative programs tend to be flexible
 and fit nicely into the programming flow of a host language.
 So you might wonder, why do so many deep learning libraries
 embrace the symbolic paradigm?
 The main reason is efficiency, both in terms of memory and speed.
-Let&#39;s revisit our toy example from before.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="n">np</span>
+Let’s revisit our toy example from before.</p>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="n">np</span>
     <span class="n">a</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">(</span><span class="mi">10</span><span class="p">)</span>
     <span class="n">b</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">(</span><span class="mi">10</span><span class="p">)</span> <span class="o">*</span> <span class="mi">2</span>
     <span class="n">c</span> <span class="o">=</span> <span class="n">b</span> <span class="o">*</span> <span class="n">a</span>
     <span class="n">d</span> <span class="o">=</span> <span class="n">c</span> <span class="o">+</span> <span class="mi">1</span>
     <span class="o">...</span>
-</code></pre></div>
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/prog_model/comp_graph.png" alt="Comp Graph"></p>
+</code></pre></div></div>
+
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/prog_model/comp_graph.png" alt="Comp Graph" /></p>
 
 <p>Assume that each cell in the array occupies 8 bytes of memory.
 How much memory do you need to execute this program in the Python console?</p>
 
 <p>As an imperative program we need to allocate memory at each line.
 That leaves us allocating 4 arrays of size 10.
-So we&#39;ll need <code>4 * 10 * 8 = 320</code> bytes.
+So we’ll need <code class="highlighter-rouge">4 * 10 * 8 = 320</code> bytes.
 On the other hand, if we built a computation graph,
-and knew in advance that we only needed <code>d</code>,
+and knew in advance that we only needed <code class="highlighter-rouge">d</code>,
 we could reuse the memory originally allocated for intermediate values.
 For example, by performing computations in-place,
-we might recycle the bits allocated for <code>b</code> to store <code>c</code>.
-And we might recycle the bits allocated for <code>c</code> to store <code>d</code>.
+we might recycle the bits allocated for <code class="highlighter-rouge">b</code> to store <code class="highlighter-rouge">c</code>.
+And we might recycle the bits allocated for <code class="highlighter-rouge">c</code> to store <code class="highlighter-rouge">d</code>.
 In the end we could cut our memory requirement in half,
-requiring just <code>2 * 10 * 8 = 160</code> bytes.</p>
+requiring just <code class="highlighter-rouge">2 * 10 * 8 = 160</code> bytes.</p>
 
 <p>Symbolic programs are more <em>restricted</em>.
-When we call <code>compile</code> on D, we tell the system
-that only the value of <code>d</code> is needed.
+When we call <code class="highlighter-rouge">compile</code> on D, we tell the system
+that only the value of <code class="highlighter-rouge">d</code> is needed.
 The intermediate values of the computation,
-in this case <code>c</code>, is then invisible to us.</p>
+in this case <code class="highlighter-rouge">c</code>, is then invisible to us.</p>
 
 <p>We benefit because the symbolic programs
 can then safely reuse the memory for in-place computation.
-But on the other hand, if we later decide that we need to access <code>c</code>, we&#39;re out of luck.
+But on the other hand, if we later decide that we need to access <code class="highlighter-rouge">c</code>, we’re out of luck.
 So imperative programs are better prepared to encounter all possible demands.
 If we ran the imperative version of the code in a Python console,
 we could inspect any of the intermediate variables in the future.</p>
@@ -469,9 +600,9 @@
 in optimized libraries, such as CXXNet and Caffe.
 Operation folding improves computation efficiency.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/prog_model/comp_graph_fold.png" alt="Comp Graph Folded"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/prog_model/comp_graph_fold.png" alt="Comp Graph Folded" /></p>
 
-<p>Note, you can&#39;t perform operation folding in imperative programs,
+<p>Note, you can’t perform operation folding in imperative programs,
 because the intermediate values might be referenced in the future.
 Operation folding is possible in symbolic programs
 because you get the entire computation graph,
@@ -482,11 +613,11 @@
 <p>In this section, we compare the two programming models
 on the problem of auto differentiation, or backpropagation.
 Differentiation is of vital importance in deep learning
-because it&#39;s the mechanism by which we train our models.
+because it’s the mechanism by which we train our models.
 In any deep learning model, we define a <em>loss function</em>.
 A <em>loss function</em> measures how far the model is from the desired output.
 We then typically pass over training examples (pairs of inputs and ground-truth outputs).
-At each step we update the model&#39;s <em>parameters</em> to minimize the loss.
+At each step we update the model’s <em>parameters</em> to minimize the loss.
 To determine the direction in which to update the parameters,
 we need to take the derivative of the loss function with respect to the parameters.</p>
 
@@ -494,15 +625,16 @@
 they had to work out the derivative calculations by hand.
 While the math is reasonably straightforward,
 for complex models, it can be time-consuming and tedious work.
-All modern deep learning libraries make the practitioner/researcher&#39;s job
+All modern deep learning libraries make the practitioner/researcher’s job
 much easier, by automatically solving the problem of gradient calculation.</p>
 
 <p>Both imperative and symbolic programs can perform gradient calculation.
-So let&#39;s take a look at how you might perform automatic differentiation with each.</p>
+So let’s take a look at how you might perform automatic differentiation with each.</p>
 
-<p>Let&#39;s start with imperative programs.
+<p>Let’s start with imperative programs.
 The following example Python code performs automatic differentiation using our toy example:</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="k">class</span> <span class="nc">array</span><span class="p">(</span><span class="nb">object</span><span class="p">)</span> <span class="p">:</span>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">class</span> <span class="nc">array</span><span class="p">(</span><span class="nb">object</span><span class="p">)</span> <span class="p">:</span>
         <span class="s">"""Simple Array object that support autodiff."""</span>
         <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">value</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
             <span class="bp">self</span><span class="o">.</span><span class="n">value</span> <span class="o">=</span> <span class="n">value</span>
@@ -535,16 +667,18 @@
     <span class="c1"># Results
 </span>    <span class="c1"># 3
 </span>    <span class="c1"># {'a': 2, 'b': 1}
-</span></code></pre></div>
+</span></code></pre></div></div>
+
 <p>In this code, each array object contains a grad function (it is actually a closure).
-When you run <code>d.grad</code>, it recursively invokes the grad function of its inputs,
+When you run <code class="highlighter-rouge">d.grad</code>, it recursively invokes the grad function of its inputs,
 backprops the gradient value back, and
 returns the gradient value of each input.</p>
 
-<p>This might look a bit complicated, so let&#39;s consider
+<p>This might look a bit complicated, so let’s consider
 the gradient calculation for symbolic programs.
 The following program performs symbolic gradient calculation for the same task.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="n">A</span> <span class="o">=</span> <span class="n">Variable</span><span class="p">(</span><span class="s">'A'</span><span class="p">)</span>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="n">A</span> <span class="o">=</span> <span class="n">Variable</span><span class="p">(</span><span class="s">'A'</span><span class="p">)</span>
     <span class="n">B</span> <span class="o">=</span> <span class="n">Variable</span><span class="p">(</span><span class="s">'B'</span><span class="p">)</span>
     <span class="n">C</span> <span class="o">=</span> <span class="n">B</span> <span class="o">*</span> <span class="n">A</span>
     <span class="n">D</span> <span class="o">=</span> <span class="n">C</span> <span class="o">+</span> <span class="n">Constant</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
@@ -553,45 +687,46 @@
     <span class="c1"># compiles the gradient function.
 </span>    <span class="n">f</span> <span class="o">=</span> <span class="nb">compile</span><span class="p">([</span><span class="n">gA</span><span class="p">,</span> <span class="n">gB</span><span class="p">])</span>
     <span class="n">grad_a</span><span class="p">,</span> <span class="n">grad_b</span> <span class="o">=</span> <span class="n">f</span><span class="p">(</span><span class="n">A</span><span class="o">=</span><span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">(</span><span class="mi">10</span><span class="p">),</span> <span class="n">B</span><span class="o">=</span><span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">(</span><span class="mi">10</span><span class="p">)</span><span class="o">*</span><span class="mi">2</span><span class="p">)</span>
-</code></pre></div>
-<p>The grad function of <code>D</code> generates a backward computation graph,
-and returns a gradient node, <code>gA, gB</code>,
+</code></pre></div></div>
+
+<p>The grad function of <code class="highlighter-rouge">D</code> generates a backward computation graph,
+and returns a gradient node, <code class="highlighter-rouge">gA, gB</code>,
 which correspond to the red nodes in the following figure.</p>
 
-<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/prog_model/comp_graph_backward.png" alt="Comp Graph Folded"></p>
+<p><img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/prog_model/comp_graph_backward.png" alt="Comp Graph Folded" /></p>
 
 <p>The imperative program actually does the same thing as the symbolic program.
 It implicitly saves a backward computation graph in the grad closure.
-When you invoked <code>d.grad</code>, you start from <code>d(D)</code>,
+When you invoked <code class="highlighter-rouge">d.grad</code>, you start from <code class="highlighter-rouge">d(D)</code>,
 backtrack through the graph to compute the gradient, and collect the results.</p>
 
 <p>The gradient calculations in both symbolic
 and imperative programming follow the same pattern.
-What&#39;s the difference then?
+What’s the difference then?
 Recall the <em>be prepared to encounter all possible demands</em> requirement of imperative programs.
 If you are creating an array library that supports automatic differentiation,
 you have to keep the grad closure along with the computation.
 This means that none of the history variables can be
-garbage-collected because they are referenced by variable <code>d</code> by way of function closure.</p>
+garbage-collected because they are referenced by variable <code class="highlighter-rouge">d</code> by way of function closure.</p>
 
-<p>What if you want to compute only the value of <code>d</code>,
-and don&#39;t want the gradient value?
-In symbolic programming, you declare this with <code>f=compiled([D])</code>.
+<p>What if you want to compute only the value of <code class="highlighter-rouge">d</code>,
+and don’t want the gradient value?
+In symbolic programming, you declare this with <code class="highlighter-rouge">f=compiled([D])</code>.
 This also declares the boundary of computation,
 telling the system that you want to compute only the forward pass.
 As a result, the system can free the memory of previous results,
 and share the memory between inputs and outputs.</p>
 
-<p>Imagine running a deep neural network with <code>n</code> layers.
+<p>Imagine running a deep neural network with <code class="highlighter-rouge">n</code> layers.
 If you are running only the forward pass,
 not the backward(gradient) pass,
 you need to allocate only two copies of
 temporal space to store the values of the intermediate layers,
-instead of <code>n</code> copies of them.
+instead of <code class="highlighter-rouge">n</code> copies of them.
 However, because imperative programs need to be prepared
 to encounter all possible demands of getting the gradient,
 they have to store the intermediate values,
-which requires <code>n</code> copies of temporal space.</p>
+which requires <code class="highlighter-rouge">n</code> copies of temporal space.</p>
 
 <p>As you can see, the level of optimization depends
 on the restrictions on what you can do.
@@ -600,7 +735,7 @@
 One the other hand, imperative programs
 must be prepared for a wider range of demands.
 Symbolic programs have a natural advantage
-because they know more about what you do and don&#39;t want.</p>
+because they know more about what you do and don’t want.</p>
 
 <p>There are ways in which we can modify imperative programs
 to incorporate similar restrictions.
@@ -608,20 +743,22 @@
 problem is to introduce a context variable.
 You can introduce a no-gradient context variable
 to turn gradient calculation off.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="k">with</span> <span class="n">context</span><span class="o">.</span><span class="n">NoGradient</span><span class="p">():</span>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="k">with</span> <span class="n">context</span><span class="o">.</span><span class="n">NoGradient</span><span class="p">():</span>
         <span class="n">a</span> <span class="o">=</span> <span class="n">array</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="s">'a'</span><span class="p">)</span>
         <span class="n">b</span> <span class="o">=</span> <span class="n">array</span><span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="s">'b'</span><span class="p">)</span>
         <span class="n">c</span> <span class="o">=</span> <span class="n">b</span> <span class="o">*</span> <span class="n">a</span>
         <span class="n">d</span> <span class="o">=</span> <span class="n">c</span> <span class="o">+</span> <span class="mi">1</span>
-</code></pre></div>
+</code></pre></div></div>
+
 <!-- This provides an imperative program with the ability to impose some restrictions, but reduces efficiency. -->
 
 <p>However, this example still must be prepared to encounter all possible demands,
-which means that you can&#39;t perform the in-place calculation
+which means that you can’t perform the in-place calculation
 to reuse memory in the forward pass (a trick commonly used to reduce GPU memory usage).
-The techniques we&#39;ve discussed generate an explicit backward pass.
+The techniques we’ve discussed generate an explicit backward pass.
 Some of the libraries such as Caffe and CXXNet perform backprop implicitly on the same graph.
-The approach we&#39;ve discussed in this section also applies to them.</p>
+The approach we’ve discussed in this section also applies to them.</p>
 
 <p>Most configuration-file-based libraries,
 such as CXXNet and Caffe are designed
@@ -638,7 +775,7 @@
 
 <h3 id="model-checkpoint">Model Checkpoint</h3>
 
-<p>It&#39;s important to able to save a model and load it back later.
+<p>It’s important to able to save a model and load it back later.
 There are different ways to <em>save</em> your work.
 Normally, to save a neural network,
 you need to save two things: a net configuration
@@ -649,7 +786,8 @@
 you can directly serialize the computation graph, and load it back later.
 This solves the problem of saving the configuration
 without introducing an additional layer.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="n">A</span> <span class="o">=</span> <span class="n">Variable</span><span class="p">(</span><span class="s">'A'</span><span class="p">)</span>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="n">A</span> <span class="o">=</span> <span class="n">Variable</span><span class="p">(</span><span class="s">'A'</span><span class="p">)</span>
     <span class="n">B</span> <span class="o">=</span> <span class="n">Variable</span><span class="p">(</span><span class="s">'B'</span><span class="p">)</span>
     <span class="n">C</span> <span class="o">=</span> <span class="n">B</span> <span class="o">*</span> <span class="n">A</span>
     <span class="n">D</span> <span class="o">=</span> <span class="n">C</span> <span class="o">+</span> <span class="n">Constant</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
@@ -659,22 +797,23 @@
     <span class="n">f</span> <span class="o">=</span> <span class="nb">compile</span><span class="p">([</span><span class="n">D2</span><span class="p">])</span>
     <span class="c1"># more operations
 </span>    <span class="o">...</span>
-</code></pre></div>
+</code></pre></div></div>
+
 <p>Because an imperative program executes as it describes the computation,
-you have to save the code itself as the <code>configuration</code>,
+you have to save the code itself as the <code class="highlighter-rouge">configuration</code>,
 or build another configuration layer on top of the imperative language.</p>
 
 <h3 id="parameter-updates">Parameter Updates</h3>
 
 <p>Most symbolic programs are data flow (computation) graphs.
 Data flow graphs describe computation.
-But it&#39;s not obvious how to use graphs to describe parameter updates.
-That&#39;s because parameter updates introduce mutation,
+But it’s not obvious how to use graphs to describe parameter updates.
+That’s because parameter updates introduce mutation,
 which is not a data flow concept.
 Most symbolic programs introduce a special update statement
 to update persistent state in the programs.</p>
 
-<p>It&#39;s usually easier to write parameter updates in an imperative style,
+<p>It’s usually easier to write parameter updates in an imperative style,
 especially when you need multiple updates that relate to each other.
 For symbolic programs, the update statement is also executed as you call it.
 So in that sense, most symbolic deep learning libraries
@@ -685,7 +824,7 @@
 
 <p>In comparing the two programming styles,
 some of our arguments might not be strictly true,
-i.e., it&#39;s possible to make an imperative program
+i.e., it’s possible to make an imperative program
 more like a traditional symbolic program or vice versa.
 However, the two archetypes are useful abstractions,
 especially for understanding the differences between deep learning libraries.
@@ -702,34 +841,35 @@
 In general, there are two families of operations supported by most deep learning libraries:</p>
 
 <ul>
-<li>Big operations - typically for computing neural network layers (e.g. FullyConnected and BatchNormalize).</li>
-<li>Small operations - mathematical functions like matrix multiplication and element-wise addition.</li>
+  <li>Big operations - typically for computing neural network layers (e.g. FullyConnected and BatchNormalize).</li>
+  <li>Small operations - mathematical functions like matrix multiplication and element-wise addition.</li>
 </ul>
 
 <p>Libraries like CXXNet and Caffe support layer-level operations.
 Libraries like Theano and Minerva support fine-grained operations.</p>
 
 <h3 id="smaller-operations-can-be-more-flexible">Smaller Operations Can Be More Flexible</h3>
-
-<p>It&#39;s quite natural to use smaller operations to compose bigger operations.
+<p>It’s quite natural to use smaller operations to compose bigger operations.
 For example, the sigmoid unit can simply be composed of division, addition and an exponentiation:</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="n">sigmoid</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="o">=</span> <span class="mf">1.0</span> <span class="o">/</span> <span class="p">(</span><span class="mf">1.0</span> <span class="o">+</span> <span class="n">exp</span><span class="p">(</span><span class="o">-</span><span class="n">x</span><span class="p">))</span>
-</code></pre></div>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="n">sigmoid</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="o">=</span> <span class="mf">1.0</span> <span class="o">/</span> <span class="p">(</span><span class="mf">1.0</span> <span class="o">+</span> <span class="n">exp</span><span class="p">(</span><span class="o">-</span><span class="n">x</span><span class="p">))</span>
+</code></pre></div></div>
 <p>Using smaller operations as building blocks, you can express nearly anything you want.
-If you&#39;re more familiar with CXXNet- or Caffe-style layers,
-note that these operations don&#39;t differ from a layer, except that they are smaller.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="n">SigmoidLayer</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="o">=</span> <span class="n">EWiseDivisionLayer</span><span class="p">(</span><span class="mf">1.0</span><span class="p">,</span> <span class="n">AddScalarLayer</span><span class="p">(</span><span class="n">ExpLayer</span><span class="p">(</span><span class="o">-</span><span class="n">x</span><span class="p">),</span> <span class="mf">1.0</span><span class="p">))</span>
-</code></pre></div>
+If you’re more familiar with CXXNet- or Caffe-style layers,
+note that these operations don’t differ from a layer, except that they are smaller.</p>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="n">SigmoidLayer</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="o">=</span> <span class="n">EWiseDivisionLayer</span><span class="p">(</span><span class="mf">1.0</span><span class="p">,</span> <span class="n">AddScalarLayer</span><span class="p">(</span><span class="n">ExpLayer</span><span class="p">(</span><span class="o">-</span><span class="n">x</span><span class="p">),</span> <span class="mf">1.0</span><span class="p">))</span>
+</code></pre></div></div>
 <p>This expression composes three layers,
 with each defining its forward and backward (gradient) function.
 Using smaller operations gives you the advantage of building new layers quickly,
 because you only need to compose the components.</p>
 
 <h3 id="big-operations-are-more-efficient">Big Operations Are More Efficient</h3>
-
 <p>Directly composing sigmoid layers requires three layers of operation, instead of one.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="n">SigmoidLayer</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="o">=</span> <span class="n">EWiseDivisionLayer</span><span class="p">(</span><span class="mf">1.0</span><span class="p">,</span> <span class="n">AddScalarLayer</span><span class="p">(</span><span class="n">ExpLayer</span><span class="p">(</span><span class="o">-</span><span class="n">x</span><span class="p">),</span> <span class="mf">1.0</span><span class="p">))</span>
-</code></pre></div>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="n">SigmoidLayer</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="o">=</span> <span class="n">EWiseDivisionLayer</span><span class="p">(</span><span class="mf">1.0</span><span class="p">,</span> <span class="n">AddScalarLayer</span><span class="p">(</span><span class="n">ExpLayer</span><span class="p">(</span><span class="o">-</span><span class="n">x</span><span class="p">),</span> <span class="mf">1.0</span><span class="p">))</span>
+</code></pre></div></div>
 <p>This code creates overhead for computation and memory (which could be optimized, with cost).</p>
 
 <p>Libraries like CXXNet and Caffe take a different approach.
@@ -742,20 +882,20 @@
 <h3 id="compilation-and-optimization">Compilation and Optimization</h3>
 
 <p>Can small operations be optimized? Of course, they can.
-Let&#39;s look at the system optimization part of the compilation engine.
+Let’s look at the system optimization part of the compilation engine.
 Two types of optimization can be performed on the computation graph:</p>
 
 <ul>
-<li>Memory allocation optimization, to reuse the memory of the intermediate computations.</li>
-<li>Operator fusion, to detect sub-graph patterns, such as the sigmoid, and fuse them into a bigger operation kernel.</li>
+  <li>Memory allocation optimization, to reuse the memory of the intermediate computations.</li>
+  <li>Operator fusion, to detect sub-graph patterns, such as the sigmoid, and fuse them into a bigger operation kernel.</li>
 </ul>
 
-<p>Memory allocation optimization isn&#39;t restricted to small operations graphs.
+<p>Memory allocation optimization isn’t restricted to small operations graphs.
 You can use it with bigger operations graph, too.
 However, optimization might not be essential
 for bigger operation libraries like CXXNet and Caffe,
-because you can&#39;t find the compilation step in them.
-However, there&#39;s a (dumb) <code>compilation step</code> in these libraries,
+because you can’t find the compilation step in them.
+However, there’s a (dumb) <code class="highlighter-rouge">compilation step</code> in these libraries,
 that basically translates the layers into a fixed forward,
 backprop execution plan, by running each operation one by one.</p>
 
@@ -774,18 +914,17 @@
 for the libraries that solely support smaller operations.</p>
 
 <p>As in the case of symbolic vs. imperative,
-the bigger operation libraries &quot;cheat&quot;
+the bigger operation libraries “cheat”
 by asking you to provide restrictions (to the common layer),
 so that you actually perform the sub-graph matching.
 This moves the compilation overhead to the real brain, which is usually not too bad.</p>
 
 <h3 id="expression-template-and-statically-typed-language">Expression Template and Statically Typed Language</h3>
-
 <p>You always have a need to write small operations and compose them.
 Libraries like Caffe use hand-crafted kernels to build these bigger blocks.
 Otherwise, you would have to compose smaller operations using Python.</p>
 
-<p>There&#39;s a third choice that works pretty well.
+<p>There’s a third choice that works pretty well.
 This is called the expression template.
 Basically, you use template programming to
 generate generic kernels from an expression tree at compile time.
@@ -798,23 +937,23 @@
 is that expression evaluation is done at compile time for C++ with an existing type,
 so there is no additional runtime overhead.
 In principle, this is also possible with other statically typed languages that support templates,
-but we&#39;ve seen this trick used only in C++.</p>
+but we’ve seen this trick used only in C++.</p>
 
 <p>Expression template libraries create a middle ground between Python operations
 and hand-crafted big kernels by allowing C++ users to craft efficient big
-operations by composing smaller operations. It&#39;s an option worth considering.</p>
+operations by composing smaller operations. It’s an option worth considering.</p>
 
 <h2 id="mix-the-approaches">Mix the Approaches</h2>
 
-<p>Now that we&#39;ve compared the programming models, which one should you choose?
-Before delving into that, we should emphasize that depending on the problems you&#39;re trying to solve,
+<p>Now that we’ve compared the programming models, which one should you choose?
+Before delving into that, we should emphasize that depending on the problems you’re trying to solve,
 our comparison might not necessarily have a big impact.</p>
 
-<p>Remember <a href="https://en.wikipedia.org/wiki/Amdahl%27s_law">Amdahl&#39;s law</a>:
+<p>Remember <a href="https://en.wikipedia.org/wiki/Amdahl%27s_law">Amdahl’s law</a>:
 If you are optimizing a non-performance-critical part of your problem,
-you won&#39;t get much of a performance gain.</p>
+you won’t get much of a performance gain.</p>
 
-<p>As you&#39;ve seen, there usually is a trade-off between efficiency,
+<p>As you’ve seen, there usually is a trade-off between efficiency,
 flexibility, and engineering complexity.
 The more suitable programming style depends on the problem you are trying to solve.
 For example, imperative programs are better for parameter updates,
@@ -822,8 +961,8 @@
 
 <p>We advocate <em>mixing</em> the approaches.
 Sometimes the part that we want to be flexible
-isn&#39;t crucial to performance.
-In these cases, it&#39;s okay to leave some efficiency on the table
+isn’t crucial to performance.
+In these cases, it’s okay to leave some efficiency on the table
 to support more flexible interfaces.
 In machine learning, combining methods usually works better than using just one.</p>
 
@@ -832,20 +971,20 @@
 In this section, we discuss how to do so.</p>
 
 <h3 id="symbolic-and-imperative-programs">Symbolic and Imperative Programs</h3>
-
 <p>There are two ways to mix symbolic and imperative programs:</p>
 
 <ul>
-<li>Use imperative programs within symbolic programs as callbacks</li>
-<li>Use symbolic programs as part of imperative programs</li>
+  <li>Use imperative programs within symbolic programs as callbacks</li>
+  <li>Use symbolic programs as part of imperative programs</li>
 </ul>
 
-<p>We&#39;ve observed that it&#39;s usually helpful to write parameter updates imperatively,
+<p>We’ve observed that it’s usually helpful to write parameter updates imperatively,
 and perform gradient calculations in symbolic programs.</p>
 
 <p>Symbolic libraries already mix programs because Python itself is imperative.
 For example, the following program mixes the symbolic approach with NumPy, which is imperative.</p>
-<div class="highlight"><pre><code class="language-python" data-lang="python">    <span class="n">A</span> <span class="o">=</span> <span class="n">Variable</span><span class="p">(</span><span class="s">'A'</span><span class="p">)</span>
+
+<div class="language-python highlighter-rouge"><div class="highlight"><pre class="highlight"><code>    <span class="n">A</span> <span class="o">=</span> <span class="n">Variable</span><span class="p">(</span><span class="s">'A'</span><span class="p">)</span>
     <span class="n">B</span> <span class="o">=</span> <span class="n">Variable</span><span class="p">(</span><span class="s">'B'</span><span class="p">)</span>
     <span class="n">C</span> <span class="o">=</span> <span class="n">B</span> <span class="o">*</span> <span class="n">A</span>
     <span class="n">D</span> <span class="o">=</span> <span class="n">C</span> <span class="o">+</span> <span class="n">Constant</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
@@ -853,7 +992,7 @@
 </span>    <span class="n">f</span> <span class="o">=</span> <span class="nb">compile</span><span class="p">(</span><span class="n">D</span><span class="p">)</span>
     <span class="n">d</span> <span class="o">=</span> <span class="n">f</span><span class="p">(</span><span class="n">A</span><span class="o">=</span><span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">(</span><span class="mi">10</span><span class="p">),</span> <span class="n">B</span><span class="o">=</span><span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">(</span><span class="mi">10</span><span class="p">)</span><span class="o">*</span><span class="mi">2</span><span class="p">)</span>
     <span class="n">d</span> <span class="o">=</span> <span class="n">d</span> <span class="o">+</span> <span class="mf">1.0</span>
-</code></pre></div>
+</code></pre></div></div>
 <p>The symbolic graphs are compiled into a function that can be executed imperatively.
 The internals are a black box to the user.
 This is exactly like writing C++ programs and exposing them to Python, which we commonly do.</p>
@@ -874,10 +1013,10 @@
 Usually, you can use big operations to compose existing
 components, and use smaller operations to build the new parts.</p>
 
-<p>Recall Amdahl&#39;s law. Often, the new components
+<p>Recall Amdahl’s law. Often, the new components
 are not the cause of the computation bottleneck.
 Because the performance-critical part is already optimized by
-the bigger operations, it&#39;s okay to forego optimizing the additional small operations,
+the bigger operations, it’s okay to forego optimizing the additional small operations,
 or to do a limited amount of memory optimization instead
 of operation fusion and directly running them.</p>
 
@@ -886,22 +1025,22 @@
 <p>In this document, we compared multiple approaches
 to developing programming environments for deep learning.
 We compared both the usability and efficiency implications of each,
-finding that many of these trade-offs (like imperative vs symbolic aren&#39;t necessarily black and white).
+finding that many of these trade-offs (like imperative vs symbolic aren’t necessarily black and white).
 You can choose your approach, or combine the approaches
 to create more interesting and intelligent deep learning libraries.</p>
 
 <h2 id="contribute-to-mxnet">Contribute to MXNet</h2>
 
 <p>This document is part of our effort to provide <a href="overview">open-source system design notes</a>
-for deep learning libraries. If you&#39;re interested in contributing to <em>MXNet</em> or its
+for deep learning libraries. If you’re interested in contributing to <em>MXNet</em> or its
 documentation, <a href="http://github.com/apache/incubator-mxnet">fork us on GitHub</a>.</p>
 
 <h2 id="next-steps">Next Steps</h2>
 
 <ul>
-<li><a href="note_engine">Dependency Engine for Deep Learning</a></li>
-<li><a href="note_memory">Squeeze the Memory Consumption of Deep Learning</a></li>
-<li><a href="note_data_loading">Efficient Data Loading Module for Deep Learning</a></li>
+  <li><a href="note_engine">Dependency Engine for Deep Learning</a></li>
+  <li><a href="note_memory">Squeeze the Memory Consumption of Deep Learning</a></li>
+  <li><a href="note_data_loading">Efficient Data Loading Module for Deep Learning</a></li>
 </ul>
 
     </div>
@@ -918,8 +1057,7 @@
             <div class="col-4">
                 <h4 class="footer-category-title">Resources</h4>
                 <ul class="contact-list">
-                    <li><a class="u-email" href="mailto:dev@mxnet.apache.org">Dev list</a></li>
-                    <li><a class="u-email" href="mailto:user@mxnet.apache.org">User mailing list</a></li>
+                    <li><a href="/community/contribute#mxnet-dev-communications">Mailing lists</a></li>
                     <li><a href="https://cwiki.apache.org/confluence/display/MXNET/Apache+MXNet+Home">Developer Wiki</a></li>
                     <li><a href="https://issues.apache.org/jira/projects/MXNET/issues">Jira Tracker</a></li>
                     <li><a href="https://github.com/apache/incubator-mxnet/labels/Roadmap">Github Roadmap</a></li>
diff --git a/api/clojure.html b/api/clojure.html
index 898eff3..8b26b6f 100644
--- a/api/clojure.html
+++ b/api/clojure.html
@@ -5,23 +5,23 @@
   <meta name="viewport" content="width=device-width, initial-scale=1">
   <link href="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/mxnet-icon.png" rel="icon" type="image/png"><!-- Begin Jekyll SEO tag v2.6.1 -->
 <title>Clojure Guide | Apache MXNet</title>
-<meta name="generator" content="Jekyll v3.8.6" />
+<meta name="generator" content="Jekyll v4.0.0" />
 <meta property="og:title" content="Clojure Guide" />
 <meta property="og:locale" content="en_US" />
 <meta name="description" content="A flexible and efficient library for deep learning." />
 <meta property="og:description" content="A flexible and efficient library for deep learning." />
-<link rel="canonical" href="https://mxnet.incubator.apache.org/api/clojure" />
-<meta property="og:url" content="https://mxnet.incubator.apache.org/api/clojure" />
+<link rel="canonical" href="https://mxnet-beta.staged.apache.org//api/clojure" />
+<meta property="og:url" content="https://mxnet-beta.staged.apache.org//api/clojure" />
 <meta property="og:site_name" content="Apache MXNet" />
 <script type="application/ld+json">
-{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Clojure Guide","url":"https://mxnet.incubator.apache.org/api/clojure","@context":"https://schema.org"}</script>
+{"description":"A flexible and efficient library for deep learning.","@type":"WebPage","headline":"Clojure Guide","url":"https://mxnet-beta.staged.apache.org//api/clojure","@context":"https://schema.org"}</script>
 <!-- End Jekyll SEO tag -->
 <script src="https://medium-widget.pixelpoint.io/widget.js"></script>
-  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet.incubator.apache.org/feed.xml" title="Apache MXNet" /><script src="https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js"></script>
-
-  <script src="/assets/js/clipboard.js"></script>
-  <script src="/assets/js/copycode.js"></script>
-</head>
+  <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.css" />
+  <link rel="stylesheet" href="/assets/main.css"><link type="application/atom+xml" rel="alternate" href="https://mxnet-beta.staged.apache.org//feed.xml" title="Apache MXNet" /><script src="/assets/js/jquery-3.3.1.min.js"></script><script src="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.js" defer></script>
+  <script src="/assets/js/globalSearch.js" defer></script>
+  <script src="/assets/js/clipboard.js" defer></script>
+  <script src="/assets/js/copycode.js" defer></script></head>
 <body><header class="site-header" role="banner">
 
   <script>
@@ -59,14 +59,173 @@
             </svg>
           </span>
       </label>
-
+      <div class="gs-search-border">
+        <div id="gs-search-icon"></div>
+        <form id="global-search-form">
+          <input id="global-search" type="text" title="Search" placeholder="Search" />
+          <div id="global-search-dropdown-container">
+            <button class="gs-current-version btn" type="button" data-toggle="dropdown">
+                <span id="gs-current-version-label">master</span>
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+          <span id="global-search-close">x</span>
+        </form>
+      </div>
       <div class="trigger">
+        <div id="global-search-mobile-border">
+          <div id="gs-search-icon-mobile"></div>
+          <input id="global-search-mobile" placeholder="Search..." type="text"/>
+          <div id="global-search-dropdown-container-mobile">
+            <button class="gs-current-version-mobile btn" type="button" data-toggle="dropdown">
+                <svg class="gs-dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true">
+                    <path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path>
+                </svg>
+            </button>
+            <ul class="gs-opt-group gs-version-dropdown-mobile">
+              
+                
+                  <li class="gs-opt gs-versions active">master</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.6</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.5.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.4.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.3.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.2.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.1.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">1.0.0</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.12.1</li>
+                
+              
+                
+                  <li class="gs-opt gs-versions">0.11.0</li>
+                
+              
+            </ul>
+        </div>
+        </div>
         <a class="page-link" href="/get_started">Get Started</a>
         <a class="page-link" href="/blog">Blog</a>
         <a class="page-link" href="/features">Features</a>
         <a class="page-link" href="/ecosystem">Ecosystem</a>
         <a class="page-link" href="/api">Docs & Tutorials</a>
         <a class="page-link" href="https://github.com/apache/incubator-mxnet">GitHub</a>
+        <div class="dropdown">
+          <span class="dropdown-header">master
+            <svg class="dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
+          </span>
+          <div class="dropdown-content">
+            
+              
+                <a class="dropdown-option-active" href="/">master</a>
+              
+            
+              
+                <a href="/versions/1.6/">1.6</a>
+              
+            
+              
+                <a href="/versions/1.5.0/">1.5.0</a>
+              
+            
+              
+                <a href="/versions/1.4.1/">1.4.1</a>
+              
+            
+              
+                <a href="/versions/1.3.1/">1.3.1</a>
+              
+            
+              
+                <a href="/versions/1.2.1/">1.2.1</a>
+              
+            
+              
+                <a href="/versions/1.1.0/">1.1.0</a>
+              
+            
+              
+                <a href="/versions/1.0.0/">1.0.0</a>
+              
+            
+              
+                <a href="/versions/0.12.1/">0.12.1</a>
+              
+            
+              
+                <a href="/versions/0.11.0/">0.11.0</a>
+              
+            
+          </div>
+        </div>
       </div>
     </nav>
   </div>
@@ -92,6 +251,8 @@
         
            <!-- resource-p -->
         
+           <!-- resource-p -->
+        
         
         
         
@@ -289,75 +450,36 @@
            <!-- resource-p -->
         
            <!-- resource-p -->
-        
-           <!-- resource-p -->
-        
-           <!-- resource-p -->
-        
-           <!-- resource-p -->
-        
-           <!-- resource-p -->
-        
-           <!-- resource-p -->
-        
-           <!-- resource-p -->
-        
-           <!-- resource-p -->
-        
-           <!-- resource-p -->
-        
-           <!-- resource-p -->
-        
-           <!-- resource-p -->
-        
-           <!-- resource-p -->
-        
-           <!-- resource-p -->
-        
-           <!-- resource-p -->
-        
-           <!-- resource-p -->
           <!-- page -->
         </ul>
     </div>
     <div class="col-9">
         <!--- Licensed to the Apache Software Foundation (ASF) under one -->
-
 <!--- or more contributor license agreements.  See the NOTICE file -->
-
 <!--- distributed with this work for additional information -->
-
 <!--- regarding copyright ownership.  The ASF licenses this file -->
-
 <!--- to you under the Apache License, Version 2.0 (the -->
-
 <!--- "License"); you may not use this file except in compliance -->
-
 <!--- with the License.  You may obtain a copy of the License at -->
 
 <!---   http://www.apache.org/licenses/LICENSE-2.0 -->
 
 <!--- Unless required by applicable law or agreed to in writing, -->
-
 <!--- software distributed under the License is distributed on an -->
-
 <!--- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -->
-
 <!--- KIND, either express or implied.  See the License for the -->
-
 <!--- specific language governing permissions and limitations -->
-
 <!--- under the License. -->
 
-<h1 id="mxnet-clojure-api">MXNet - Clojure API</h1>
+<h1 id="mxnet---clojure-api">MXNet - Clojure API</h1>
 
 <p>MXNet supports the Clojure programming language. The MXNet Clojure package brings flexible and efficient GPU
 computing and state-of-art deep learning to Clojure. It enables you to write seamless tensor/matrix computation with multiple GPUs in Clojure. It also lets you construct and customize the state-of-art deep learning models in Clojure, and apply them to tasks, such as image classification and data science challenges.</p>
 
 <h2 id="tensor-and-matrix-computations">Tensor and Matrix Computations</h2>
-
 <p>You can perform tensor or matrix computation in pure Clojure:</p>
-<div class="highlight"><pre><code class="language-clojure" data-lang="clojure"><span class="p">(</span><span class="k">def</span><span class="w"> </span><span class="n">arr</span><span class="w"> </span><span class="p">(</span><span class="nf">ndarray/ones</span><span class="w"> </span><span class="p">[</span><span class="mi">2</span><span class="w"> </span><span class="mi">3</span><span class="p">]))</span><span class="w">
+
+<div class="language-clojure highlighter-rouge"><div class="highlight"><pre class="highlight"><code><span class="p">(</span><span class="k">def</span><span class="w"> </span><span class="n">arr</span><span class="w"> </span><span class="p">(</span><span class="nf">ndarray/ones</span><span class="w"> </span><span class="p">[</span><span class="mi">2</span><span class="w"> </span><span class="mi">3</span><span class="p">]))</span><span class="w">
 
 </span><span class="n">arr</span><span class="w"> </span><span class="c1">;=&gt; #object[org.apache.mxnet.NDArray 0x597d72e "org.apache.mxnet.NDArray@e35c3ba9"]</span><span class="w">
 
@@ -368,8 +490,8 @@
 
 </span><span class="p">(</span><span class="nf">ndarray/shape-vec</span><span class="w"> </span><span class="p">(</span><span class="nf">ndarray/*</span><span class="w"> </span><span class="n">arr</span><span class="w"> </span><span class="mi">2</span><span class="p">))</span><span class="w"> </span><span class="c1">;=&gt; [2 3]</span><span class="w">
 
-</span></code></pre></div>
-    </div>
+</span></code></pre></div></div>
+</div>
 </div>
 
         </div>
@@ -383,8 +505,7 @@
             <div class="col-4">
                 <h4 class="footer-category-title">Resources</h4>
                 <ul class="contact-list">
-                    <li><a class="u-email" href="mailto:dev@mxnet.apache.org">Dev list</a></li>
-                    <li><a class="u-email" href="mailto:user@mxnet.apache.org">User mailing list</a></li>
+                    <li><a href="/community/contribute#mxnet-dev-communications">Mailing lists</a></li>
                     <li><a href="https://cwiki.apache.org/confluence/display/MXNET/Apache+MXNet+Home">Developer Wiki</a></li>
                     <li><a href="https://issues.apache.org/jira/projects/MXNET/issues">Jira Tracker</a></li>
                     <li><a href="https://github.com/apache/incubator-mxnet/labels/Roadmap">Github Roadmap</a></li>
diff --git a/api/clojure/docs/api/index.html b/api/clojure/docs/api/index.html
deleted file mode 100644
index 0004134..0000000
--- a/api/clojure/docs/api/index.html
+++ /dev/null
@@ -1,3 +0,0 @@
-<!DOCTYPE html PUBLIC ""
-    "">
-<html><head><meta charset="UTF-8" /><title>Clojure-mxnet 1.6.0-SNAPSHOT</title><link rel="stylesheet" type="text/css" href="css/default.css" /><link rel="stylesheet" type="text/css" href="css/highlight.css" /><script type="text/javascript" src="js/highlight.min.js"></script><script type="text/javascript" src="js/jquery.min.js"></script><script type="text/javascript" src="js/page_effects.js"></script><script>hljs.initHighlightingOnLoad();</script></head><body><div id="header"><h2>Generated by <a href="https://github.com/weavejester/codox">Codox</a></h2><h1><a href="index.html"><span class="project-title"><span class="project-name">Clojure-mxnet</span> <span class="project-version">1.6.0-SNAPSHOT</span></span></a></h1></div><div class="sidebar primary"><h3 class="no-link"><span class="inner">Project</span></h3><ul class="index-link"><li class="depth-1 current"><a href="index.html"><div class="inner">Index</div></a></li></ul><h3 class="no-link"><span class="inner">Namespaces</span></h3><ul><li class="depth-1"><div class="no-link"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>org</span></div></div></li><li class="depth-2"><div class="no-link"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>apache</span></div></div></li><li class="depth-3"><div class="no-link"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>clojure-mxnet</span></div></div></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.base.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>base</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.callback.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>callback</span></div></a></li><li class="depth-4 branch"><a 
href="org.apache.clojure-mxnet.context.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>context</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.dtype.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>dtype</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.eval-metric.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>eval-metric</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.executor.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>executor</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.image.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>image</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.infer.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>infer</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.initializer.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>initializer</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.io.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>io</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.kvstore.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>kvstore</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.kvstore-server.html"><div class="inner"><span class="tree"><span class="top"></span><span 
class="bottom"></span></span><span>kvstore-server</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.layout.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>layout</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.lr-scheduler.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>lr-scheduler</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.module.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>module</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.monitor.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>monitor</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.ndarray.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>ndarray</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.ndarray-api.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>ndarray-api</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.ndarray-random-api.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>ndarray-random-api</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.optimizer.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>optimizer</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.primitives.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>primitives</span></div></a></li><li 
class="depth-4 branch"><a href="org.apache.clojure-mxnet.profiler.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>profiler</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.random.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>random</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.resource-scope.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>resource-scope</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.shape.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>shape</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.symbol.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>symbol</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.symbol-api.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>symbol-api</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.symbol-random-api.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>symbol-random-api</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.util.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>util</span></div></a></li><li class="depth-4"><a href="org.apache.clojure-mxnet.visualization.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>visualization</span></div></a></li></ul></div><div class="namespace-index" id="content"><h1><span class="project-title"><span 
class="project-name">Clojure-mxnet</span> <span class="project-version">1.6.0-SNAPSHOT</span></span></h1><h5 class="license">Released under the <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache License</a></h5><div class="doc"><p>Clojure package for MXNet.</p></div><h2>Installation</h2><p>To install, add the following dependency to your project or build file:</p><pre class="deps">[org.apache.mxnet.contrib.clojure/clojure-mxnet "1.6.0-SNAPSHOT"]</pre><h2>Namespaces</h2><div class="namespace"><h3><a href="org.apache.clojure-mxnet.base.html">org.apache.clojure-mxnet.base</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.base.html#var-MX_REAL_TYPE">MX_REAL_TYPE</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.callback.html">org.apache.clojure-mxnet.callback</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.callback.html#var-invoke">invoke</a> </li><li> <a href="org.apache.clojure-mxnet.callback.html#var-speedometer">speedometer</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.context.html">org.apache.clojure-mxnet.context</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.context.html#var-cpu">cpu</a> </li><li> <a href="org.apache.clojure-mxnet.context.html#var-cpu-context">cpu-context</a> </li><li> <a href="org.apache.clojure-mxnet.context.html#var-default-context">default-context</a> </li><li> <a href="org.apache.clojure-mxnet.context.html#var-device-id">device-id</a> </li><li> <a href="org.apache.clojure-mxnet.context.html#var-device-type">device-type</a> </li><li> <a href="org.apache.clojure-mxnet.context.html#var-gpu">gpu</a> </li></ul></div></div><div 
class="namespace"><h3><a href="org.apache.clojure-mxnet.dtype.html">org.apache.clojure-mxnet.dtype</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.dtype.html#var-FLOAT16">FLOAT16</a> </li><li> <a href="org.apache.clojure-mxnet.dtype.html#var-FLOAT32">FLOAT32</a> </li><li> <a href="org.apache.clojure-mxnet.dtype.html#var-FLOAT64">FLOAT64</a> </li><li> <a href="org.apache.clojure-mxnet.dtype.html#var-INT32">INT32</a> </li><li> <a href="org.apache.clojure-mxnet.dtype.html#var-UINT8">UINT8</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.eval-metric.html">org.apache.clojure-mxnet.eval-metric</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.eval-metric.html#var-accuracy">accuracy</a> </li><li> <a href="org.apache.clojure-mxnet.eval-metric.html#var-comp-metric">comp-metric</a> </li><li> <a href="org.apache.clojure-mxnet.eval-metric.html#var-custom-metric">custom-metric</a> </li><li> <a href="org.apache.clojure-mxnet.eval-metric.html#var-f1">f1</a> </li><li> <a href="org.apache.clojure-mxnet.eval-metric.html#var-get">get</a> </li><li> <a href="org.apache.clojure-mxnet.eval-metric.html#var-get-and-reset">get-and-reset</a> </li><li> <a href="org.apache.clojure-mxnet.eval-metric.html#var-mae">mae</a> </li><li> <a href="org.apache.clojure-mxnet.eval-metric.html#var-mse">mse</a> </li><li> <a href="org.apache.clojure-mxnet.eval-metric.html#var-perplexity">perplexity</a> </li><li> <a href="org.apache.clojure-mxnet.eval-metric.html#var-reset">reset</a> </li><li> <a href="org.apache.clojure-mxnet.eval-metric.html#var-rmse">rmse</a> </li><li> <a href="org.apache.clojure-mxnet.eval-metric.html#var-top-k-accuracy">top-k-accuracy</a> </li><li> <a href="org.apache.clojure-mxnet.eval-metric.html#var-update">update</a> 
</li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.executor.html">org.apache.clojure-mxnet.executor</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.executor.html#var--.3Evec">-&gt;vec</a> </li><li> <a href="org.apache.clojure-mxnet.executor.html#var-arg-arrays">arg-arrays</a> </li><li> <a href="org.apache.clojure-mxnet.executor.html#var-arg-map">arg-map</a> </li><li> <a href="org.apache.clojure-mxnet.executor.html#var-backward">backward</a> </li><li> <a href="org.apache.clojure-mxnet.executor.html#var-forward">forward</a> </li><li> <a href="org.apache.clojure-mxnet.executor.html#var-get-grad">get-grad</a> </li><li> <a href="org.apache.clojure-mxnet.executor.html#var-grad-arrays">grad-arrays</a> </li><li> <a href="org.apache.clojure-mxnet.executor.html#var-grad-map">grad-map</a> </li><li> <a href="org.apache.clojure-mxnet.executor.html#var-outputs">outputs</a> </li><li> <a href="org.apache.clojure-mxnet.executor.html#var-reshape">reshape</a> </li><li> <a href="org.apache.clojure-mxnet.executor.html#var-set-arg">set-arg</a> </li><li> <a href="org.apache.clojure-mxnet.executor.html#var-set-arg-arrays">set-arg-arrays</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.image.html">org.apache.clojure-mxnet.image</a></h3><div class="doc"><pre class="plaintext">Image API of Clojure package.</pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.image.html#var-apply-border">apply-border</a> </li><li> <a href="org.apache.clojure-mxnet.image.html#var-COLOR">COLOR</a> </li><li> <a href="org.apache.clojure-mxnet.image.html#var-decode">decode</a> </li><li> <a href="org.apache.clojure-mxnet.image.html#var-decode-image">decode-image</a> </li><li> <a href="org.apache.clojure-mxnet.image.html#var-draw-bounding-box.21">draw-bounding-box!</a> </li><li> 
<a href="org.apache.clojure-mxnet.image.html#var-fixed-crop">fixed-crop</a> </li><li> <a href="org.apache.clojure-mxnet.image.html#var-GRAYSCALE">GRAYSCALE</a> </li><li> <a href="org.apache.clojure-mxnet.image.html#var-ndarray-.3Eimage">ndarray-&gt;image</a> </li><li> <a href="org.apache.clojure-mxnet.image.html#var-read">read</a> </li><li> <a href="org.apache.clojure-mxnet.image.html#var-read-image">read-image</a> </li><li> <a href="org.apache.clojure-mxnet.image.html#var-resize">resize</a> </li><li> <a href="org.apache.clojure-mxnet.image.html#var-resize-image">resize-image</a> </li><li> <a href="org.apache.clojure-mxnet.image.html#var-rgb-array.3F">rgb-array?</a> </li><li> <a href="org.apache.clojure-mxnet.image.html#var-to-image">to-image</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.infer.html">org.apache.clojure-mxnet.infer</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.infer.html#var-AClassifier">AClassifier</a> </li><li> <a href="org.apache.clojure-mxnet.infer.html#var-AImageClassifier">AImageClassifier</a> </li><li> <a href="org.apache.clojure-mxnet.infer.html#var-AInferenceFactory">AInferenceFactory</a> </li><li> <a href="org.apache.clojure-mxnet.infer.html#var-AObjectDetector">AObjectDetector</a> </li><li> <a href="org.apache.clojure-mxnet.infer.html#var-APredictor">APredictor</a> </li><li> <a href="org.apache.clojure-mxnet.infer.html#var-buffered-image-to-pixels">buffered-image-to-pixels</a> </li><li> <a href="org.apache.clojure-mxnet.infer.html#var-convert-descriptors">convert-descriptors</a> </li><li> <a href="org.apache.clojure-mxnet.infer.html#var-load-image-from-file">load-image-from-file</a> </li><li> <a href="org.apache.clojure-mxnet.infer.html#var-load-image-paths">load-image-paths</a> </li><li> <a href="org.apache.clojure-mxnet.infer.html#var-model-factory">model-factory</a> </li><li> <a 
href="org.apache.clojure-mxnet.infer.html#var-reshape-image">reshape-image</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.initializer.html">org.apache.clojure-mxnet.initializer</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.initializer.html#var-apply">apply</a> </li><li> <a href="org.apache.clojure-mxnet.initializer.html#var-init-weight">init-weight</a> </li><li> <a href="org.apache.clojure-mxnet.initializer.html#var-normal">normal</a> </li><li> <a href="org.apache.clojure-mxnet.initializer.html#var-uniform">uniform</a> </li><li> <a href="org.apache.clojure-mxnet.initializer.html#var-xavier">xavier</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.io.html">org.apache.clojure-mxnet.io</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.io.html#var-batch-data">batch-data</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-batch-index">batch-index</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-batch-label">batch-label</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-batch-pad">batch-pad</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-batches">batches</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-create-iterator">create-iterator</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-create-mx-data-pack">create-mx-data-pack</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-csv-iter">csv-iter</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-csv-pack">csv-pack</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-data-batch">data-batch</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-data-desc">data-desc</a> </li><li> <a 
href="org.apache.clojure-mxnet.io.html#var-data-desc-.3Emap">data-desc-&gt;map</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-dispose">dispose</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-do-batches">do-batches</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-for-batches">for-batches</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-has-next.3F">has-next?</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-image-recode-pack">image-recode-pack</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-image-record-iter">image-record-iter</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-iter-data">iter-data</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-iter-init-label">iter-init-label</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-iter-label">iter-label</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-iterator">iterator</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-mnist-iter">mnist-iter</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-mnist-pack">mnist-pack</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-ndarray-iter">ndarray-iter</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-next">next</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-prefetching-iter">prefetching-iter</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-provide-data">provide-data</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-provide-data-desc">provide-data-desc</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-provide-label">provide-label</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-provide-label-desc">provide-label-desc</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-rand-iter">rand-iter</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-reduce-batches">reduce-batches</a> </li><li> <a href="org.apache.clojure-mxnet.io.html#var-reset">reset</a> </li><li> <a 
href="org.apache.clojure-mxnet.io.html#var-resize-iter">resize-iter</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.kvstore.html">org.apache.clojure-mxnet.kvstore</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-barrier">barrier</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-create">create</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-dispose">dispose</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-init">init</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-load-optimizer-states">load-optimizer-states</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-num-dead-node">num-dead-node</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-num-workers">num-workers</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-pull">pull</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-push">push</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-rank">rank</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-save-optimizer-states">save-optimizer-states</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-send-command-to-servers">send-command-to-servers</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-set-barrier-before-exit">set-barrier-before-exit</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-set-optimizer">set-optimizer</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore.html#var-type">type</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.kvstore-server.html">org.apache.clojure-mxnet.kvstore-server</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a 
href="org.apache.clojure-mxnet.kvstore-server.html#var-init">init</a> </li><li> <a href="org.apache.clojure-mxnet.kvstore-server.html#var-start">start</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.layout.html">org.apache.clojure-mxnet.layout</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.layout.html#var-N">N</a> </li><li> <a href="org.apache.clojure-mxnet.layout.html#var-NCHW">NCHW</a> </li><li> <a href="org.apache.clojure-mxnet.layout.html#var-NT">NT</a> </li><li> <a href="org.apache.clojure-mxnet.layout.html#var-NTC">NTC</a> </li><li> <a href="org.apache.clojure-mxnet.layout.html#var-UNDEFINED">UNDEFINED</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.lr-scheduler.html">org.apache.clojure-mxnet.lr-scheduler</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.lr-scheduler.html#var-factor-scheduler">factor-scheduler</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.module.html">org.apache.clojure-mxnet.module</a></h3><div class="doc"><pre class="plaintext">Module API for Clojure package.</pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.module.html#var-arg-params">arg-params</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-aux-params">aux-params</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-backward">backward</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-bind">bind</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-borrow-optimizer">borrow-optimizer</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-data-names">data-names</a> </li><li> <a 
href="org.apache.clojure-mxnet.module.html#var-data-shapes">data-shapes</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-exec-group">exec-group</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-fit">fit</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-fit-params">fit-params</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-forward">forward</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-forward-backward">forward-backward</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-get-params">get-params</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-grad-arrays">grad-arrays</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-init-optimizer">init-optimizer</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-init-params">init-params</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-input-grads">input-grads</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-input-grads-merged">input-grads-merged</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-install-monitor">install-monitor</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-label-shapes">label-shapes</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-load-checkpoint">load-checkpoint</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-load-optimizer-states">load-optimizer-states</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-module">module</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-output-names">output-names</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-output-shapes">output-shapes</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-outputs">outputs</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-outputs-merged">outputs-merged</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-params">params</a> </li><li> <a 
href="org.apache.clojure-mxnet.module.html#var-predict">predict</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-predict-batch">predict-batch</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-predict-every-batch">predict-every-batch</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-reshape">reshape</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-save-checkpoint">save-checkpoint</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-save-optimizer-states">save-optimizer-states</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-score">score</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-set-params">set-params</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-symbol">symbol</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-update">update</a> </li><li> <a href="org.apache.clojure-mxnet.module.html#var-update-metric">update-metric</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.monitor.html">org.apache.clojure-mxnet.monitor</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.monitor.html#var-monitor">monitor</a> </li><li> <a href="org.apache.clojure-mxnet.monitor.html#var-tic">tic</a> </li><li> <a href="org.apache.clojure-mxnet.monitor.html#var-toc">toc</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.ndarray.html">org.apache.clojure-mxnet.ndarray</a></h3><div class="doc"><pre class="plaintext">NDArray API for Clojure package.</pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.25">%</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.25.3D">%=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-*">*</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-**">**</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-**.3D">**=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-*.3D">*=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.2B">+</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.2B.3D">+=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--">-</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3D">-=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3Ebyte-vec">-&gt;byte-vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3Edouble-vec">-&gt;double-vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3Efloat-vec">-&gt;float-vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3Eint-vec">-&gt;int-vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3End-vec">-&gt;nd-vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3Endarray">-&gt;ndarray</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3Eraw">-&gt;raw</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3Evec">-&gt;vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.2F">/</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.3C">&lt;</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.3C.3D">&lt;=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.3E">&gt;</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.3E.3D">&gt;=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-abs">abs</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-activation">activation</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-adam-update">adam-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-add-n">add-n</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-all-finite">all-finite</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-amp-cast">amp-cast</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-amp-multicast">amp-multicast</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-api">api</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-arange">arange</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-arccos">arccos</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-arccosh">arccosh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-arcsin">arcsin</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-arcsinh">arcsinh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-arctan">arctan</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-arctanh">arctanh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-argmax">argmax</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-argmax-channel">argmax-channel</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-argmin">argmin</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-argsort">argsort</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-array">array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-as-in-context">as-in-context</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-as-type">as-type</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-at">at</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-batch-dot">batch-dot</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-batch-norm">batch-norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-batch-norm-v1">batch-norm-v1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-batch-take">batch-take</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-bilinear-sampler">bilinear-sampler</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-block-grad">block-grad</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-add">broadcast-add</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-axes">broadcast-axes</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-axis">broadcast-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-div">broadcast-div</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-equal">broadcast-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-greater">broadcast-greater</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-greater-equal">broadcast-greater-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-hypot">broadcast-hypot</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-lesser">broadcast-lesser</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-lesser-equal">broadcast-lesser-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-like">broadcast-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-logical-and">broadcast-logical-and</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-logical-or">broadcast-logical-or</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-logical-xor">broadcast-logical-xor</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-maximum">broadcast-maximum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-minimum">broadcast-minimum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-minus">broadcast-minus</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-mod">broadcast-mod</a> 
</li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-mul">broadcast-mul</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-not-equal">broadcast-not-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-plus">broadcast-plus</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-power">broadcast-power</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-sub">broadcast-sub</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-to">broadcast-to</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-bytes-allocated">bytes-allocated</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-cast">cast</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-cast-storage">cast-storage</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-cbrt">cbrt</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ceil">ceil</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-choose-element-0index">choose-element-0index</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-clip">clip</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-close">close</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-concat">concat</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-concatenate">concatenate</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-context">context</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-convolution">convolution</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-convolution-v1">convolution-v1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-copy">copy</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-copy-to">copy-to</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-correlation">correlation</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-cos">cos</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-cosh">cosh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-creation-trace">creation-trace</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-crop">crop</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ctc-loss">ctc-loss</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-cumsum">cumsum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-custom">custom</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-deconvolution">deconvolution</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-degrees">degrees</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dependencies">dependencies</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-depth-to-space">depth-to-space</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-deserialize">deserialize</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-diag">diag</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dispose">dispose</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dispose-deps">dispose-deps</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dispose-deps-except">dispose-deps-except</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-div">div</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-div.3D">div=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dot">dot</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dropout">dropout</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dtype">dtype</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-element-wise-sum">element-wise-sum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-elemwise-add">elemwise-add</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-elemwise-div">elemwise-div</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-elemwise-mul">elemwise-mul</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-elemwise-sub">elemwise-sub</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-embedding">embedding</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-empty">empty</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-equal">equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-equals">equals</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-erf">erf</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-erfinv">erfinv</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-exp">exp</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-expand-dims">expand-dims</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-expm1">expm1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-fill-element-0index">fill-element-0index</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-finalize">finalize</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-fix">fix</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-flatten">flatten</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-flip">flip</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-floor">floor</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ftml-update">ftml-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ftrl-update">ftrl-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-full">full</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-fully-connected">fully-connected</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-gamma">gamma</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-gammaln">gammaln</a> 
</li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-gather-nd">gather-nd</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-greater">greater</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-greater-equal">greater-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-grid-generator">grid-generator</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-group-norm">group-norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-handle">handle</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-hard-sigmoid">hard-sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-hash-code">hash-code</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-identity">identity</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-identity-attach-kl-sparse-reg">identity-attach-kl-sparse-reg</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-instance-norm">instance-norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-internal">internal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-is-de-allocated">is-de-allocated</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-is-disposed">is-disposed</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-is-sparse">is-sparse</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-khatri-rao">khatri-rao</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-l2-normalization">l2-normalization</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-layer-norm">layer-norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-leaky-re-lu">leaky-re-lu</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-lesser">lesser</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-lesser-equal">lesser-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-det">linalg-det</a> 
</li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-extractdiag">linalg-extractdiag</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-extracttrian">linalg-extracttrian</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-gelqf">linalg-gelqf</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-gemm">linalg-gemm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-gemm2">linalg-gemm2</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-inverse">linalg-inverse</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-makediag">linalg-makediag</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-maketrian">linalg-maketrian</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-potrf">linalg-potrf</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-potri">linalg-potri</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-slogdet">linalg-slogdet</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-sumlogdiag">linalg-sumlogdiag</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-syrk">linalg-syrk</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-trmm">linalg-trmm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-trsm">linalg-trsm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linear-regression-output">linear-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-load">load</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-load-from-file">load-from-file</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-load2-array">load2-array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-load2-map">load2-map</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-log">log</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-log-dispose-warning">log-dispose-warning</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-log-softmax">log-softmax</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-log10">log10</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-log1p">log1p</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-log2">log2</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-logical-not">logical-not</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-logistic-regression-output">logistic-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-lrn">lrn</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-mae-regression-output">mae-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-make-loss">make-loss</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-max">max</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-max-axis">max-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-maximum">maximum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-mean">mean</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-min">min</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-min-axis">min-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-minimum">minimum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-moments">moments</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-mp-nag-mom-update">mp-nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-mp-sgd-mom-update">mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-mp-sgd-update">mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-multi-all-finite">multi-all-finite</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-multi-mp-sgd-mom-update">multi-mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-multi-mp-sgd-update">multi-mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-multi-sgd-mom-update">multi-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-multi-sgd-update">multi-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-nag-mom-update">nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-nanprod">nanprod</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-nansum">nansum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-native-address">native-address</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-native-de-allocator">native-de-allocator</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-negative">negative</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-norm">norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-normal">normal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-not-equal">not-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-one-hot">one-hot</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-onehot-encode">onehot-encode</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ones">ones</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ones-like">ones-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-org.apache.mxnet.nd-array">org.apache.mxnet.nd-array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-pad">pad</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-pick">pick</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-pooling">pooling</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-pooling-v1">pooling-v1</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-power">power</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-prod">prod</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-radians">radians</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random">random</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-exponential">random-exponential</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-gamma">random-gamma</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-generalized-negative-binomial">random-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-negative-binomial">random-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-normal">random-normal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-dirichlet">random-pdf-dirichlet</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-exponential">random-pdf-exponential</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-gamma">random-pdf-gamma</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-generalized-negative-binomial">random-pdf-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-negative-binomial">random-pdf-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-normal">random-pdf-normal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-poisson">random-pdf-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-uniform">random-pdf-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-poisson">random-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-randint">random-randint</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-random-uniform">random-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ravel-multi-index">ravel-multi-index</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-rcbrt">rcbrt</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-reciprocal">reciprocal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ref">ref</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-register">register</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-relu">relu</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-repeat">repeat</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-reshape">reshape</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-reshape-like">reshape-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-reverse">reverse</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-rint">rint</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-rmsprop-update">rmsprop-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-rmspropalex-update">rmspropalex-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-rnn">rnn</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-roi-pooling">roi-pooling</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-round">round</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-rsqrt">rsqrt</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-exponential">sample-exponential</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-gamma">sample-gamma</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-generalized-negative-binomial">sample-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-multinomial">sample-multinomial</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-sample-negative-binomial">sample-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-normal">sample-normal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-poisson">sample-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-uniform">sample-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-save">save</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-save-to-file">save-to-file</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-scatter-nd">scatter-nd</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-scope">scope</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-scope-.3D">scope-=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sequence-last">sequence-last</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sequence-mask">sequence-mask</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sequence-reverse">sequence-reverse</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-serialize">serialize</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-set">set</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sgd-mom-update">sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sgd-update">sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-shape">shape</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-shape-array">shape-array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-shape-vec">shape-vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-shuffle">shuffle</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sigmoid">sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sign">sign</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-signsgd-update">signsgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-signum-update">signum-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sin">sin</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sinh">sinh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-size">size</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-size-array">size-array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-slice">slice</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-slice-axis">slice-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-slice-channel">slice-channel</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-slice-like">slice-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-smooth-l1">smooth-l1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-softmax">softmax</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-softmax-activation">softmax-activation</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-softmax-cross-entropy">softmax-cross-entropy</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-softmax-output">softmax-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-softmin">softmin</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-softsign">softsign</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sort">sort</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-space-to-depth">space-to-depth</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sparse-format">sparse-format</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-spatial-transformer">spatial-transformer</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-split">split</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-sqrt">sqrt</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-square">square</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-squeeze">squeeze</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-stack">stack</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-stop-gradient">stop-gradient</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sum">sum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sum-axis">sum-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-svm-output">svm-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-swap-axis">swap-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-swapaxes">swapaxes</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-t">t</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-take">take</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-tan">tan</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-tanh">tanh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-tile">tile</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-to-array">to-array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-to-float64-array">to-float64-array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-to-float64-scalar">to-float64-scalar</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-to-scalar">to-scalar</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-to-sparse">to-sparse</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-to-string">to-string</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-topk">topk</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-tracing-enabled">tracing-enabled</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-transpose">transpose</a> </li><li> 
<a href="org.apache.clojure-mxnet.ndarray.html#var-trunc">trunc</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-unary--">unary--</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-uniform">uniform</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-unravel-index">unravel-index</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-up-sampling">up-sampling</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-wait-to-read">wait-to-read</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-waitall">waitall</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-where">where</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-writable">writable</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-zeros">zeros</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-zeros-like">zeros-like</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.ndarray.html">org.apache.clojure-mxnet.ndarray</a></h3><div class="doc"><pre class="plaintext">NDArray API for Clojure package.</pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.25">%</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.25.3D">%=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-*">*</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-**">**</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-**.3D">**=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-*.3D">*=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.2B">+</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.2B.3D">+=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--">-</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3D">-=</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var--.3Ebyte-vec">-&gt;byte-vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3Edouble-vec">-&gt;double-vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3Efloat-vec">-&gt;float-vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3Eint-vec">-&gt;int-vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3End-vec">-&gt;nd-vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3Endarray">-&gt;ndarray</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3Eraw">-&gt;raw</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var--.3Evec">-&gt;vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.2F">/</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.3C">&lt;</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.3C.3D">&lt;=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.3E">&gt;</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-.3E.3D">&gt;=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-abs">abs</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-activation">activation</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-adam-update">adam-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-add-n">add-n</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-all-finite">all-finite</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-amp-cast">amp-cast</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-amp-multicast">amp-multicast</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-api">api</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-arange">arange</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-arccos">arccos</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-arccosh">arccosh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-arcsin">arcsin</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-arcsinh">arcsinh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-arctan">arctan</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-arctanh">arctanh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-argmax">argmax</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-argmax-channel">argmax-channel</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-argmin">argmin</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-argsort">argsort</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-array">array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-as-in-context">as-in-context</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-as-type">as-type</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-at">at</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-batch-dot">batch-dot</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-batch-norm">batch-norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-batch-norm-v1">batch-norm-v1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-batch-take">batch-take</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-bilinear-sampler">bilinear-sampler</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-block-grad">block-grad</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-add">broadcast-add</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-axes">broadcast-axes</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-axis">broadcast-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-div">broadcast-div</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-equal">broadcast-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-greater">broadcast-greater</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-greater-equal">broadcast-greater-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-hypot">broadcast-hypot</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-lesser">broadcast-lesser</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-lesser-equal">broadcast-lesser-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-like">broadcast-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-logical-and">broadcast-logical-and</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-logical-or">broadcast-logical-or</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-logical-xor">broadcast-logical-xor</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-maximum">broadcast-maximum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-minimum">broadcast-minimum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-minus">broadcast-minus</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-mod">broadcast-mod</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-mul">broadcast-mul</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-not-equal">broadcast-not-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-plus">broadcast-plus</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-power">broadcast-power</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-sub">broadcast-sub</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-broadcast-to">broadcast-to</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-bytes-allocated">bytes-allocated</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-cast">cast</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-cast-storage">cast-storage</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-cbrt">cbrt</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ceil">ceil</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-choose-element-0index">choose-element-0index</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-clip">clip</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-close">close</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-concat">concat</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-concatenate">concatenate</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-context">context</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-convolution">convolution</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-convolution-v1">convolution-v1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-copy">copy</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-copy-to">copy-to</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-correlation">correlation</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-cos">cos</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-cosh">cosh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-creation-trace">creation-trace</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-crop">crop</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ctc-loss">ctc-loss</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-cumsum">cumsum</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-custom">custom</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-deconvolution">deconvolution</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-degrees">degrees</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dependencies">dependencies</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-depth-to-space">depth-to-space</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-deserialize">deserialize</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-diag">diag</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dispose">dispose</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dispose-deps">dispose-deps</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dispose-deps-except">dispose-deps-except</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-div">div</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-div.3D">div=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dot">dot</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dropout">dropout</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-dtype">dtype</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-element-wise-sum">element-wise-sum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-elemwise-add">elemwise-add</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-elemwise-div">elemwise-div</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-elemwise-mul">elemwise-mul</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-elemwise-sub">elemwise-sub</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-embedding">embedding</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-empty">empty</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-equal">equal</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-equals">equals</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-erf">erf</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-erfinv">erfinv</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-exp">exp</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-expand-dims">expand-dims</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-expm1">expm1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-fill-element-0index">fill-element-0index</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-finalize">finalize</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-fix">fix</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-flatten">flatten</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-flip">flip</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-floor">floor</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ftml-update">ftml-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ftrl-update">ftrl-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-full">full</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-fully-connected">fully-connected</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-gamma">gamma</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-gammaln">gammaln</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-gather-nd">gather-nd</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-greater">greater</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-greater-equal">greater-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-grid-generator">grid-generator</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-group-norm">group-norm</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-handle">handle</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-hard-sigmoid">hard-sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-hash-code">hash-code</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-identity">identity</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-identity-attach-kl-sparse-reg">identity-attach-kl-sparse-reg</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-instance-norm">instance-norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-internal">internal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-is-de-allocated">is-de-allocated</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-is-disposed">is-disposed</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-is-sparse">is-sparse</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-khatri-rao">khatri-rao</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-l2-normalization">l2-normalization</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-layer-norm">layer-norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-leaky-re-lu">leaky-re-lu</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-lesser">lesser</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-lesser-equal">lesser-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-det">linalg-det</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-extractdiag">linalg-extractdiag</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-extracttrian">linalg-extracttrian</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-gelqf">linalg-gelqf</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-gemm">linalg-gemm</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-linalg-gemm2">linalg-gemm2</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-inverse">linalg-inverse</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-makediag">linalg-makediag</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-maketrian">linalg-maketrian</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-potrf">linalg-potrf</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-potri">linalg-potri</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-slogdet">linalg-slogdet</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-sumlogdiag">linalg-sumlogdiag</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-syrk">linalg-syrk</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-trmm">linalg-trmm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linalg-trsm">linalg-trsm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-linear-regression-output">linear-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-load">load</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-load-from-file">load-from-file</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-load2-array">load2-array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-load2-map">load2-map</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-log">log</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-log-dispose-warning">log-dispose-warning</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-log-softmax">log-softmax</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-log10">log10</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-log1p">log1p</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-log2">log2</a> 
</li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-logical-not">logical-not</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-logistic-regression-output">logistic-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-lrn">lrn</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-mae-regression-output">mae-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-make-loss">make-loss</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-max">max</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-max-axis">max-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-maximum">maximum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-mean">mean</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-min">min</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-min-axis">min-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-minimum">minimum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-moments">moments</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-mp-nag-mom-update">mp-nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-mp-sgd-mom-update">mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-mp-sgd-update">mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-multi-all-finite">multi-all-finite</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-multi-mp-sgd-mom-update">multi-mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-multi-mp-sgd-update">multi-mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-multi-sgd-mom-update">multi-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-multi-sgd-update">multi-sgd-update</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-nag-mom-update">nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-nanprod">nanprod</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-nansum">nansum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-native-address">native-address</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-native-de-allocator">native-de-allocator</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-negative">negative</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-norm">norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-normal">normal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-not-equal">not-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-one-hot">one-hot</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-onehot-encode">onehot-encode</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ones">ones</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ones-like">ones-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-org.apache.mxnet.nd-array">org.apache.mxnet.nd-array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-pad">pad</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-pick">pick</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-pooling">pooling</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-pooling-v1">pooling-v1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-power">power</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-prod">prod</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-radians">radians</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random">random</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-exponential">random-exponential</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-random-gamma">random-gamma</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-generalized-negative-binomial">random-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-negative-binomial">random-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-normal">random-normal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-dirichlet">random-pdf-dirichlet</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-exponential">random-pdf-exponential</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-gamma">random-pdf-gamma</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-generalized-negative-binomial">random-pdf-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-negative-binomial">random-pdf-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-normal">random-pdf-normal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-poisson">random-pdf-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-pdf-uniform">random-pdf-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-poisson">random-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-randint">random-randint</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-random-uniform">random-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ravel-multi-index">ravel-multi-index</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-rcbrt">rcbrt</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-reciprocal">reciprocal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-ref">ref</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-register">register</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-relu">relu</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-repeat">repeat</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-reshape">reshape</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-reshape-like">reshape-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-reverse">reverse</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-rint">rint</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-rmsprop-update">rmsprop-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-rmspropalex-update">rmspropalex-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-rnn">rnn</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-roi-pooling">roi-pooling</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-round">round</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-rsqrt">rsqrt</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-exponential">sample-exponential</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-gamma">sample-gamma</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-generalized-negative-binomial">sample-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-multinomial">sample-multinomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-negative-binomial">sample-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-normal">sample-normal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-poisson">sample-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sample-uniform">sample-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-save">save</a> 
</li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-save-to-file">save-to-file</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-scatter-nd">scatter-nd</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-scope">scope</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-scope-.3D">scope-=</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sequence-last">sequence-last</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sequence-mask">sequence-mask</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sequence-reverse">sequence-reverse</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-serialize">serialize</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-set">set</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sgd-mom-update">sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sgd-update">sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-shape">shape</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-shape-array">shape-array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-shape-vec">shape-vec</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-shuffle">shuffle</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sigmoid">sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sign">sign</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-signsgd-update">signsgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-signum-update">signum-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sin">sin</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sinh">sinh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-size">size</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-size-array">size-array</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-slice">slice</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-slice-axis">slice-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-slice-channel">slice-channel</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-slice-like">slice-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-smooth-l1">smooth-l1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-softmax">softmax</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-softmax-activation">softmax-activation</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-softmax-cross-entropy">softmax-cross-entropy</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-softmax-output">softmax-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-softmin">softmin</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-softsign">softsign</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sort">sort</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-space-to-depth">space-to-depth</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sparse-format">sparse-format</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-spatial-transformer">spatial-transformer</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-split">split</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sqrt">sqrt</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-square">square</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-squeeze">squeeze</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-stack">stack</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-stop-gradient">stop-gradient</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sum">sum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-sum-axis">sum-axis</a> 
</li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-svm-output">svm-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-swap-axis">swap-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-swapaxes">swapaxes</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-t">t</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-take">take</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-tan">tan</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-tanh">tanh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-tile">tile</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-to-array">to-array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-to-float64-array">to-float64-array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-to-float64-scalar">to-float64-scalar</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-to-scalar">to-scalar</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-to-sparse">to-sparse</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-to-string">to-string</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-topk">topk</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-tracing-enabled">tracing-enabled</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-transpose">transpose</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-trunc">trunc</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-unary--">unary--</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-uniform">uniform</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-unravel-index">unravel-index</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-up-sampling">up-sampling</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-wait-to-read">wait-to-read</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray.html#var-waitall">waitall</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-where">where</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-writable">writable</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-zeros">zeros</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray.html#var-zeros-like">zeros-like</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.ndarray-api.html">org.apache.clojure-mxnet.ndarray-api</a></h3><div class="doc"><pre class="plaintext">Experimental</pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--copy">-copy</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-det">-linalg-det</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-extractdiag">-linalg-extractdiag</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-extracttrian">-linalg-extracttrian</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-gelqf">-linalg-gelqf</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-gemm">-linalg-gemm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-gemm2">-linalg-gemm2</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-inverse">-linalg-inverse</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-makediag">-linalg-makediag</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-maketrian">-linalg-maketrian</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-potrf">-linalg-potrf</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-potri">-linalg-potri</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-slogdet">-linalg-slogdet</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-sumlogdiag">-linalg-sumlogdiag</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-syrk">-linalg-syrk</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-trmm">-linalg-trmm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-trsm">-linalg-trsm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--np-cumsum">-np-cumsum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--ravel-multi-index">-ravel-multi-index</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--shuffle">-shuffle</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var--unravel-index">-unravel-index</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-abs">abs</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-activation">activation</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-adam-update">adam-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-add-n">add-n</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-all-finite">all-finite</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-amp-cast">amp-cast</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-amp-multicast">amp-multicast</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-arccos">arccos</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-arccosh">arccosh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-arcsin">arcsin</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-arcsinh">arcsinh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-arctan">arctan</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-arctanh">arctanh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-argmax">argmax</a> </li><li> 
<a href="org.apache.clojure-mxnet.ndarray-api.html#var-argmax-channel">argmax-channel</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-argmin">argmin</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-argsort">argsort</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-batch-dot">batch-dot</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-batch-norm">batch-norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-batch-norm-v1">batch-norm-v1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-batch-take">batch-take</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-bilinear-sampler">bilinear-sampler</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-block-grad">block-grad</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-add">broadcast-add</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-axis">broadcast-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-div">broadcast-div</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-equal">broadcast-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-greater">broadcast-greater</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-greater-equal">broadcast-greater-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-hypot">broadcast-hypot</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-lesser">broadcast-lesser</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-lesser-equal">broadcast-lesser-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-like">broadcast-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-logical-and">broadcast-logical-and</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-logical-or">broadcast-logical-or</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-logical-xor">broadcast-logical-xor</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-maximum">broadcast-maximum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-minimum">broadcast-minimum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-mod">broadcast-mod</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-mul">broadcast-mul</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-not-equal">broadcast-not-equal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-power">broadcast-power</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-sub">broadcast-sub</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-to">broadcast-to</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-cast">cast</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-cast-storage">cast-storage</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-cbrt">cbrt</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-ceil">ceil</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-clip">clip</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-concat">concat</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-convolution">convolution</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-convolution-v1">convolution-v1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-correlation">correlation</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-cos">cos</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-cosh">cosh</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-crop">crop</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-ctc-loss">ctc-loss</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-deconvolution">deconvolution</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-degrees">degrees</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-depth-to-space">depth-to-space</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-diag">diag</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-dot">dot</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-dropout">dropout</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-elemwise-add">elemwise-add</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-elemwise-div">elemwise-div</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-elemwise-mul">elemwise-mul</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-elemwise-sub">elemwise-sub</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-embedding">embedding</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-erf">erf</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-erfinv">erfinv</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-exp">exp</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-expand-dims">expand-dims</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-expm1">expm1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-fill-element-0index">fill-element-0index</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-fix">fix</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-flatten">flatten</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-floor">floor</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-ftml-update">ftml-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-ftrl-update">ftrl-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-fully-connected">fully-connected</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-gamma">gamma</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-gammaln">gammaln</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-gather-nd">gather-nd</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-grid-generator">grid-generator</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-group-norm">group-norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-hard-sigmoid">hard-sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-identity-attach-kl-sparse-reg">identity-attach-kl-sparse-reg</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-instance-norm">instance-norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-khatri-rao">khatri-rao</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-l2-normalization">l2-normalization</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-layer-norm">layer-norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-leaky-re-lu">leaky-re-lu</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-linear-regression-output">linear-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-log">log</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-log-softmax">log-softmax</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-log10">log10</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-log1p">log1p</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-log2">log2</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-logical-not">logical-not</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-logistic-regression-output">logistic-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-lrn">lrn</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-mae-regression-output">mae-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-make-loss">make-loss</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-max">max</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-mean">mean</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-min">min</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-moments">moments</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-mp-nag-mom-update">mp-nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-mp-sgd-mom-update">mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-mp-sgd-update">mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-multi-all-finite">multi-all-finite</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-multi-mp-sgd-mom-update">multi-mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-multi-mp-sgd-update">multi-mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-multi-sgd-mom-update">multi-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-multi-sgd-update">multi-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-nag-mom-update">nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-nanprod">nanprod</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-nansum">nansum</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-negative">negative</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-norm">norm</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-one-hot">one-hot</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-ones-like">ones-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-pad">pad</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-pick">pick</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-pooling">pooling</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-pooling-v1">pooling-v1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-prod">prod</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-radians">radians</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-rcbrt">rcbrt</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-reciprocal">reciprocal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-relu">relu</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-repeat">repeat</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-reshape">reshape</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-reshape-like">reshape-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-reverse">reverse</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-rint">rint</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-rmsprop-update">rmsprop-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-rmspropalex-update">rmspropalex-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-rnn">rnn</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-roi-pooling">roi-pooling</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-round">round</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-rsqrt">rsqrt</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-scatter-nd">scatter-nd</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-sequence-last">sequence-last</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-sequence-mask">sequence-mask</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-sequence-reverse">sequence-reverse</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-sgd-mom-update">sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-sgd-update">sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-shape-array">shape-array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-sigmoid">sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-sign">sign</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-signsgd-update">signsgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-signum-update">signum-update</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-sin">sin</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-sinh">sinh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-size-array">size-array</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-slice">slice</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-slice-axis">slice-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-slice-channel">slice-channel</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-slice-like">slice-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-smooth-l1">smooth-l1</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-softmax">softmax</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-softmax-activation">softmax-activation</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-softmax-cross-entropy">softmax-cross-entropy</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-softmax-output">softmax-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-softmin">softmin</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-softsign">softsign</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-sort">sort</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-space-to-depth">space-to-depth</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-spatial-transformer">spatial-transformer</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-sqrt">sqrt</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-square">square</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-squeeze">squeeze</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-stack">stack</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-sum">sum</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-svm-output">svm-output</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-swap-axis">swap-axis</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-take">take</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-tan">tan</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-tanh">tanh</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-tile">tile</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-topk">topk</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-transpose">transpose</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-trunc">trunc</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-up-sampling">up-sampling</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-where">where</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-api.html#var-zeros-like">zeros-like</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.ndarray-random-api.html">org.apache.clojure-mxnet.ndarray-random-api</a></h3><div class="doc"><pre class="plaintext">Experimental</pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-exponential">exponential</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-exponential-like">exponential-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-gamma">gamma</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-gamma-like">gamma-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-generalized-negative-binomial">generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-generalized-negative-binomial-like">generalized-negative-binomial-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-multinomial-like">multinomial-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-negative-binomial">negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-negative-binomial-like">negative-binomial-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-normal">normal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-normal-like">normal-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-pdf-dirichlet">pdf-dirichlet</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-pdf-exponential">pdf-exponential</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-pdf-gamma">pdf-gamma</a> </li><li> <a 
href="org.apache.clojure-mxnet.ndarray-random-api.html#var-pdf-generalized-negative-binomial">pdf-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-pdf-negative-binomial">pdf-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-pdf-normal">pdf-normal</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-pdf-poisson">pdf-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-pdf-uniform">pdf-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-poisson">poisson</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-poisson-like">poisson-like</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-randint">randint</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-uniform">uniform</a> </li><li> <a href="org.apache.clojure-mxnet.ndarray-random-api.html#var-uniform-like">uniform-like</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.optimizer.html">org.apache.clojure-mxnet.optimizer</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.optimizer.html#var-ada-delta">ada-delta</a> </li><li> <a href="org.apache.clojure-mxnet.optimizer.html#var-ada-grad">ada-grad</a> </li><li> <a href="org.apache.clojure-mxnet.optimizer.html#var-adam">adam</a> </li><li> <a href="org.apache.clojure-mxnet.optimizer.html#var-create-state">create-state</a> </li><li> <a href="org.apache.clojure-mxnet.optimizer.html#var-dcasgd">dcasgd</a> </li><li> <a href="org.apache.clojure-mxnet.optimizer.html#var-nag">nag</a> </li><li> <a href="org.apache.clojure-mxnet.optimizer.html#var-rms-prop">rms-prop</a> </li><li> <a href="org.apache.clojure-mxnet.optimizer.html#var-sgd">sgd</a> </li><li> <a href="org.apache.clojure-mxnet.optimizer.html#var-sgld">sgld</a> </li><li> <a href="org.apache.clojure-mxnet.optimizer.html#var-update">update</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.primitives.html">org.apache.clojure-mxnet.primitives</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.primitives.html#var--.3Enum">-&gt;num</a> </li><li> <a href="org.apache.clojure-mxnet.primitives.html#var-mx-double">mx-double</a> </li><li> <a href="org.apache.clojure-mxnet.primitives.html#var-mx-float">mx-float</a> </li><li> <a href="org.apache.clojure-mxnet.primitives.html#var-primitive.3F">primitive?</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.profiler.html">org.apache.clojure-mxnet.profiler</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public 
variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.profiler.html#var-dump-profile">dump-profile</a> </li><li> <a href="org.apache.clojure-mxnet.profiler.html#var-profiler-set-config">profiler-set-config</a> </li><li> <a href="org.apache.clojure-mxnet.profiler.html#var-profiler-set-state">profiler-set-state</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.random.html">org.apache.clojure-mxnet.random</a></h3><div class="doc"><pre class="plaintext">Random Number interface of mxnet.</pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.random.html#var-normal">normal</a> </li><li> <a href="org.apache.clojure-mxnet.random.html#var-seed">seed</a> </li><li> <a href="org.apache.clojure-mxnet.random.html#var-uniform">uniform</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.resource-scope.html">org.apache.clojure-mxnet.resource-scope</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.resource-scope.html#var-using">using</a> </li><li> <a href="org.apache.clojure-mxnet.resource-scope.html#var-with-do">with-do</a> </li><li> <a href="org.apache.clojure-mxnet.resource-scope.html#var-with-let">with-let</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.shape.html">org.apache.clojure-mxnet.shape</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.shape.html#var--.3Eshape">-&gt;shape</a> </li><li> <a href="org.apache.clojure-mxnet.shape.html#var--.3Evec">-&gt;vec</a> </li><li> <a href="org.apache.clojure-mxnet.shape.html#var-length">length</a> </li><li> <a href="org.apache.clojure-mxnet.shape.html#var-product">product</a> </li></ul></div></div><div class="namespace"><h3><a 
href="org.apache.clojure-mxnet.symbol.html">org.apache.clojure-mxnet.symbol</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.25">%</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.25.24m-dc.24sp">%$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.25.24m-fc.24sp">%$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.25.24m-ic.24sp">%$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-*">*</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-*.24m-dc.24sp">*$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-*.24m-fc.24sp">*$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-*.24m-ic.24sp">*$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-**">**</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-**.24m-dc.24sp">**$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-**.24m-fc.24sp">**$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-**.24m-ic.24sp">**$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.2B">+</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.2B.24m-dc.24sp">+$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.2B.24m-fc.24sp">+$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.2B.24m-ic.24sp">+$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var--">-</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var--.24m-dc.24sp">-$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var--.24m-fc.24sp">-$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var--.24m-ic.24sp">-$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C">&lt;</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-.3C.24m-dc.24sp">&lt;$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C.24m-fc.24sp">&lt;$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C.24m-ic.24sp">&lt;$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C.3D">&lt;=</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C.3D.24m-dc.24sp">&lt;=$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C.3D.24m-fc.24sp">&lt;=$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C.3D.24m-ic.24sp">&lt;=$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E">&gt;</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E.24m-dc.24sp">&gt;$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E.24m-fc.24sp">&gt;$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E.24m-ic.24sp">&gt;$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E.3D">&gt;=</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E.3D.24m-dc.24sp">&gt;=$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E.3D.24m-fc.24sp">&gt;=$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E.3D.24m-ic.24sp">&gt;=$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-abs">abs</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-activation">activation</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-adam-update">adam-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-add-n">add-n</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-all-finite">all-finite</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-amp-cast">amp-cast</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-amp-multicast">amp-multicast</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-api">api</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-apply">apply</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arange">arange</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arange-with-inference">arange-with-inference</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arccos">arccos</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arccosh">arccosh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arcsin">arcsin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arcsinh">arcsinh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arctan">arctan</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arctanh">arctanh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-argmax">argmax</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-argmax-channel">argmax-channel</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-argmin">argmin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-argsort">argsort</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-attr">attr</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-attr-map">attr-map</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-batch-dot">batch-dot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-batch-norm">batch-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-batch-norm-v1">batch-norm-v1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-batch-take">batch-take</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-bilinear-sampler">bilinear-sampler</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-bind">bind</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-block-grad">block-grad</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-broadcast-add">broadcast-add</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-axes">broadcast-axes</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-axis">broadcast-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-div">broadcast-div</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-equal">broadcast-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-greater">broadcast-greater</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-greater-equal">broadcast-greater-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-hypot">broadcast-hypot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-lesser">broadcast-lesser</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-lesser-equal">broadcast-lesser-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-like">broadcast-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-logical-and">broadcast-logical-and</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-logical-or">broadcast-logical-or</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-logical-xor">broadcast-logical-xor</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-maximum">broadcast-maximum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-minimum">broadcast-minimum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-minus">broadcast-minus</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-mod">broadcast-mod</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-mul">broadcast-mul</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-not-equal">broadcast-not-equal</a> 
</li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-plus">broadcast-plus</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-power">broadcast-power</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-sub">broadcast-sub</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-to">broadcast-to</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-bytes-allocated">bytes-allocated</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-cast">cast</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-cast-storage">cast-storage</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-cbrt">cbrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ceil">ceil</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-choose-element-0index">choose-element-0index</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-clip">clip</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-clone">clone</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-close">close</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-concat">concat</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-convolution">convolution</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-convolution-v1">convolution-v1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-correlation">correlation</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-cos">cos</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-cosh">cosh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-create-from-listed-symbols">create-from-listed-symbols</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-create-from-listed-symbols-no-check">create-from-listed-symbols-no-check</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-create-from-named-symbols">create-from-named-symbols</a> 
</li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-create-from-named-symbols-no-check">create-from-named-symbols-no-check</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-creation-trace">creation-trace</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-crop">crop</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ctc-loss">ctc-loss</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-cumsum">cumsum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-custom">custom</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-debug-str">debug-str</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-deconvolution">deconvolution</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-degrees">degrees</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-depth-to-space">depth-to-space</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-diag">diag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-dispose">dispose</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-div">div</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-div.24m-dc.24sp">div$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-div.24m-fc.24sp">div$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-div.24m-ic.24sp">div$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-dot">dot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-dropout">dropout</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-element-wise-sum">element-wise-sum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-elemwise-add">elemwise-add</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-elemwise-div">elemwise-div</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-elemwise-mul">elemwise-mul</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-elemwise-sub">elemwise-sub</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-embedding">embedding</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-equal">equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-erf">erf</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-erfinv">erfinv</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-exp">exp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-expand-dims">expand-dims</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-expm1">expm1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-fill-element-0index">fill-element-0index</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-finalize">finalize</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-fix">fix</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-flatten">flatten</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-flip">flip</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-floor">floor</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ftml-update">ftml-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ftrl-update">ftrl-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-fully-connected">fully-connected</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-gamma">gamma</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-gammaln">gammaln</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-gather-nd">gather-nd</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-get">get</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-get-internals">get-internals</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-greater">greater</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-greater-equal">greater-equal</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-grid-generator">grid-generator</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-group">group</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-group-norm">group-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-handle">handle</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-hard-sigmoid">hard-sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-identity">identity</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-identity-attach-kl-sparse-reg">identity-attach-kl-sparse-reg</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-infer-shape">infer-shape</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-infer-type">infer-type</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-instance-norm">instance-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-is-de-allocated">is-de-allocated</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-is-disposed">is-disposed</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-khatri-rao">khatri-rao</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-l2-normalization">l2-normalization</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-layer-norm">layer-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-leaky-re-lu">leaky-re-lu</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-lesser">lesser</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-lesser-equal">lesser-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-det">linalg-det</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-extractdiag">linalg-extractdiag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-extracttrian">linalg-extracttrian</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-gelqf">linalg-gelqf</a> 
</li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-gemm">linalg-gemm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-gemm2">linalg-gemm2</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-inverse">linalg-inverse</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-makediag">linalg-makediag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-maketrian">linalg-maketrian</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-potrf">linalg-potrf</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-potri">linalg-potri</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-slogdet">linalg-slogdet</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-sumlogdiag">linalg-sumlogdiag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-syrk">linalg-syrk</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-trmm">linalg-trmm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-trsm">linalg-trsm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linear-regression-output">linear-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-list-arguments">list-arguments</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-list-attr">list-attr</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-list-auxiliary-states">list-auxiliary-states</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-list-outputs">list-outputs</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-load">load</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-load-json">load-json</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-log">log</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-log-dispose-warning">log-dispose-warning</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-log-softmax">log-softmax</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-log10">log10</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-log1p">log1p</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-log2">log2</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-logical-not">logical-not</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-logistic-regression-output">logistic-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-lrn">lrn</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-mae-regression-output">mae-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-make-loss">make-loss</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-max">max</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-max-axis">max-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-mean">mean</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-min">min</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-min-axis">min-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-moments">moments</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-mp-nag-mom-update">mp-nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-mp-sgd-mom-update">mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-mp-sgd-update">mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-multi-all-finite">multi-all-finite</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-multi-mp-sgd-mom-update">multi-mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-multi-mp-sgd-update">multi-mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-multi-sgd-mom-update">multi-sgd-mom-update</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-multi-sgd-update">multi-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-nag-mom-update">nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-nanprod">nanprod</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-nansum">nansum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-native-address">native-address</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-native-de-allocator">native-de-allocator</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-negative">negative</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-norm">norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-normal">normal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-not-equal">not-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-one-hot">one-hot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ones">ones</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ones-like">ones-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-org.apache.mxnet.symbol">org.apache.mxnet.symbol</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-pad">pad</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-pick">pick</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-pooling">pooling</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-pooling-v1">pooling-v1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-pow">pow</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-prod">prod</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-radians">radians</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random">random</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-exponential">random-exponential</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-random-gamma">random-gamma</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-generalized-negative-binomial">random-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-negative-binomial">random-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-normal">random-normal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-dirichlet">random-pdf-dirichlet</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-exponential">random-pdf-exponential</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-gamma">random-pdf-gamma</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-generalized-negative-binomial">random-pdf-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-negative-binomial">random-pdf-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-normal">random-pdf-normal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-poisson">random-pdf-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-uniform">random-pdf-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-poisson">random-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-randint">random-randint</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-uniform">random-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ravel-multi-index">ravel-multi-index</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-rcbrt">rcbrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-reciprocal">reciprocal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ref">ref</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-register">register</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-relu">relu</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-repeat">repeat</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-reshape">reshape</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-reshape-like">reshape-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-reverse">reverse</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-rint">rint</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-rmsprop-update">rmsprop-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-rmspropalex-update">rmspropalex-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-rnn">rnn</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-roi-pooling">roi-pooling</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-round">round</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-rsqrt">rsqrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-exponential">sample-exponential</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-gamma">sample-gamma</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-generalized-negative-binomial">sample-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-multinomial">sample-multinomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-negative-binomial">sample-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-normal">sample-normal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-poisson">sample-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-uniform">sample-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-save">save</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-save-checkpoint">save-checkpoint</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-scatter-nd">scatter-nd</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-scope">scope</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sequence-last">sequence-last</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sequence-mask">sequence-mask</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sequence-reverse">sequence-reverse</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sgd-mom-update">sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sgd-update">sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-shape-array">shape-array</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-shuffle">shuffle</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sigmoid">sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sign">sign</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-signsgd-update">signsgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-signum-update">signum-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-simple-bind">simple-bind</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sin">sin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sinh">sinh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-size-array">size-array</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-slice">slice</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-slice-axis">slice-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-slice-channel">slice-channel</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-slice-like">slice-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-smooth-l1">smooth-l1</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-softmax">softmax</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-softmax-activation">softmax-activation</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-softmax-cross-entropy">softmax-cross-entropy</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-softmax-output">softmax-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-softmin">softmin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-softsign">softsign</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sort">sort</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-space-to-depth">space-to-depth</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-spatial-transformer">spatial-transformer</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-split">split</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sqrt">sqrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-square">square</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-squeeze">squeeze</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-stack">stack</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-stop-gradient">stop-gradient</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sum">sum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sum-axis">sum-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-svm-output">svm-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-swap-axis">swap-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-swapaxes">swapaxes</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-take">take</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-tan">tan</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-tanh">tanh</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-tile">tile</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-to-json">to-json</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-topk">topk</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-tracing-enabled">tracing-enabled</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-transpose">transpose</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-trunc">trunc</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-uniform">uniform</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-unravel-index">unravel-index</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-up-sampling">up-sampling</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-variable">variable</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-where">where</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-zeros">zeros</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-zeros-like">zeros-like</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.symbol.html">org.apache.clojure-mxnet.symbol</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.25">%</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.25.24m-dc.24sp">%$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.25.24m-fc.24sp">%$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.25.24m-ic.24sp">%$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-*">*</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-*.24m-dc.24sp">*$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-*.24m-fc.24sp">*$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-*.24m-ic.24sp">*$m-ic$sp</a> </li><li> 
<a href="org.apache.clojure-mxnet.symbol.html#var-**">**</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-**.24m-dc.24sp">**$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-**.24m-fc.24sp">**$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-**.24m-ic.24sp">**$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.2B">+</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.2B.24m-dc.24sp">+$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.2B.24m-fc.24sp">+$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.2B.24m-ic.24sp">+$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var--">-</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var--.24m-dc.24sp">-$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var--.24m-fc.24sp">-$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var--.24m-ic.24sp">-$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C">&lt;</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C.24m-dc.24sp">&lt;$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C.24m-fc.24sp">&lt;$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C.24m-ic.24sp">&lt;$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C.3D">&lt;=</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C.3D.24m-dc.24sp">&lt;=$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C.3D.24m-fc.24sp">&lt;=$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3C.3D.24m-ic.24sp">&lt;=$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E">&gt;</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E.24m-dc.24sp">&gt;$m-dc$sp</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-.3E.24m-fc.24sp">&gt;$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E.24m-ic.24sp">&gt;$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E.3D">&gt;=</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E.3D.24m-dc.24sp">&gt;=$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E.3D.24m-fc.24sp">&gt;=$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-.3E.3D.24m-ic.24sp">&gt;=$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-abs">abs</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-activation">activation</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-adam-update">adam-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-add-n">add-n</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-all-finite">all-finite</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-amp-cast">amp-cast</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-amp-multicast">amp-multicast</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-api">api</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-apply">apply</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arange">arange</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arange-with-inference">arange-with-inference</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arccos">arccos</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arccosh">arccosh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arcsin">arcsin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arcsinh">arcsinh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arctan">arctan</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-arctanh">arctanh</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-argmax">argmax</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-argmax-channel">argmax-channel</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-argmin">argmin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-argsort">argsort</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-attr">attr</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-attr-map">attr-map</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-batch-dot">batch-dot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-batch-norm">batch-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-batch-norm-v1">batch-norm-v1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-batch-take">batch-take</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-bilinear-sampler">bilinear-sampler</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-bind">bind</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-block-grad">block-grad</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-add">broadcast-add</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-axes">broadcast-axes</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-axis">broadcast-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-div">broadcast-div</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-equal">broadcast-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-greater">broadcast-greater</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-greater-equal">broadcast-greater-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-hypot">broadcast-hypot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-lesser">broadcast-lesser</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-broadcast-lesser-equal">broadcast-lesser-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-like">broadcast-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-logical-and">broadcast-logical-and</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-logical-or">broadcast-logical-or</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-logical-xor">broadcast-logical-xor</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-maximum">broadcast-maximum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-minimum">broadcast-minimum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-minus">broadcast-minus</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-mod">broadcast-mod</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-mul">broadcast-mul</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-not-equal">broadcast-not-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-plus">broadcast-plus</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-power">broadcast-power</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-sub">broadcast-sub</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-broadcast-to">broadcast-to</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-bytes-allocated">bytes-allocated</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-cast">cast</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-cast-storage">cast-storage</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-cbrt">cbrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ceil">ceil</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-choose-element-0index">choose-element-0index</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-clip">clip</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-clone">clone</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-close">close</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-concat">concat</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-convolution">convolution</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-convolution-v1">convolution-v1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-correlation">correlation</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-cos">cos</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-cosh">cosh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-create-from-listed-symbols">create-from-listed-symbols</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-create-from-listed-symbols-no-check">create-from-listed-symbols-no-check</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-create-from-named-symbols">create-from-named-symbols</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-create-from-named-symbols-no-check">create-from-named-symbols-no-check</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-creation-trace">creation-trace</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-crop">crop</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ctc-loss">ctc-loss</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-cumsum">cumsum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-custom">custom</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-debug-str">debug-str</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-deconvolution">deconvolution</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-degrees">degrees</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-depth-to-space">depth-to-space</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-diag">diag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-dispose">dispose</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-div">div</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-div.24m-dc.24sp">div$m-dc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-div.24m-fc.24sp">div$m-fc$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-div.24m-ic.24sp">div$m-ic$sp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-dot">dot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-dropout">dropout</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-element-wise-sum">element-wise-sum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-elemwise-add">elemwise-add</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-elemwise-div">elemwise-div</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-elemwise-mul">elemwise-mul</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-elemwise-sub">elemwise-sub</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-embedding">embedding</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-equal">equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-erf">erf</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-erfinv">erfinv</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-exp">exp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-expand-dims">expand-dims</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-expm1">expm1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-fill-element-0index">fill-element-0index</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-finalize">finalize</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-fix">fix</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-flatten">flatten</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-flip">flip</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-floor">floor</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ftml-update">ftml-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ftrl-update">ftrl-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-fully-connected">fully-connected</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-gamma">gamma</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-gammaln">gammaln</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-gather-nd">gather-nd</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-get">get</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-get-internals">get-internals</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-greater">greater</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-greater-equal">greater-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-grid-generator">grid-generator</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-group">group</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-group-norm">group-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-handle">handle</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-hard-sigmoid">hard-sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-identity">identity</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-identity-attach-kl-sparse-reg">identity-attach-kl-sparse-reg</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-infer-shape">infer-shape</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-infer-type">infer-type</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-instance-norm">instance-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-is-de-allocated">is-de-allocated</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-is-disposed">is-disposed</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-khatri-rao">khatri-rao</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-l2-normalization">l2-normalization</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-layer-norm">layer-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-leaky-re-lu">leaky-re-lu</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-lesser">lesser</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-lesser-equal">lesser-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-det">linalg-det</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-extractdiag">linalg-extractdiag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-extracttrian">linalg-extracttrian</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-gelqf">linalg-gelqf</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-gemm">linalg-gemm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-gemm2">linalg-gemm2</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-inverse">linalg-inverse</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-makediag">linalg-makediag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-maketrian">linalg-maketrian</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-potrf">linalg-potrf</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-potri">linalg-potri</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-linalg-slogdet">linalg-slogdet</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-sumlogdiag">linalg-sumlogdiag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-syrk">linalg-syrk</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-trmm">linalg-trmm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linalg-trsm">linalg-trsm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-linear-regression-output">linear-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-list-arguments">list-arguments</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-list-attr">list-attr</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-list-auxiliary-states">list-auxiliary-states</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-list-outputs">list-outputs</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-load">load</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-load-json">load-json</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-log">log</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-log-dispose-warning">log-dispose-warning</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-log-softmax">log-softmax</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-log10">log10</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-log1p">log1p</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-log2">log2</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-logical-not">logical-not</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-logistic-regression-output">logistic-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-lrn">lrn</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-mae-regression-output">mae-regression-output</a> 
</li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-make-loss">make-loss</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-max">max</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-max-axis">max-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-mean">mean</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-min">min</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-min-axis">min-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-moments">moments</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-mp-nag-mom-update">mp-nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-mp-sgd-mom-update">mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-mp-sgd-update">mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-multi-all-finite">multi-all-finite</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-multi-mp-sgd-mom-update">multi-mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-multi-mp-sgd-update">multi-mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-multi-sgd-mom-update">multi-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-multi-sgd-update">multi-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-nag-mom-update">nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-nanprod">nanprod</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-nansum">nansum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-native-address">native-address</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-native-de-allocator">native-de-allocator</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-negative">negative</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-norm">norm</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-normal">normal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-not-equal">not-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-one-hot">one-hot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ones">ones</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ones-like">ones-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-org.apache.mxnet.symbol">org.apache.mxnet.symbol</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-pad">pad</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-pick">pick</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-pooling">pooling</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-pooling-v1">pooling-v1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-pow">pow</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-prod">prod</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-radians">radians</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random">random</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-exponential">random-exponential</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-gamma">random-gamma</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-generalized-negative-binomial">random-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-negative-binomial">random-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-normal">random-normal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-dirichlet">random-pdf-dirichlet</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-exponential">random-pdf-exponential</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-gamma">random-pdf-gamma</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-generalized-negative-binomial">random-pdf-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-negative-binomial">random-pdf-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-normal">random-pdf-normal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-poisson">random-pdf-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-pdf-uniform">random-pdf-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-poisson">random-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-randint">random-randint</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-random-uniform">random-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ravel-multi-index">ravel-multi-index</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-rcbrt">rcbrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-reciprocal">reciprocal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-ref">ref</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-register">register</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-relu">relu</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-repeat">repeat</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-reshape">reshape</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-reshape-like">reshape-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-reverse">reverse</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-rint">rint</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-rmsprop-update">rmsprop-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-rmspropalex-update">rmspropalex-update</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-rnn">rnn</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-roi-pooling">roi-pooling</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-round">round</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-rsqrt">rsqrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-exponential">sample-exponential</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-gamma">sample-gamma</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-generalized-negative-binomial">sample-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-multinomial">sample-multinomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-negative-binomial">sample-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-normal">sample-normal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-poisson">sample-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sample-uniform">sample-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-save">save</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-save-checkpoint">save-checkpoint</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-scatter-nd">scatter-nd</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-scope">scope</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sequence-last">sequence-last</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sequence-mask">sequence-mask</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sequence-reverse">sequence-reverse</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sgd-mom-update">sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sgd-update">sgd-update</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-shape-array">shape-array</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-shuffle">shuffle</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sigmoid">sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sign">sign</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-signsgd-update">signsgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-signum-update">signum-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-simple-bind">simple-bind</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sin">sin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sinh">sinh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-size-array">size-array</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-slice">slice</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-slice-axis">slice-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-slice-channel">slice-channel</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-slice-like">slice-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-smooth-l1">smooth-l1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-softmax">softmax</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-softmax-activation">softmax-activation</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-softmax-cross-entropy">softmax-cross-entropy</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-softmax-output">softmax-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-softmin">softmin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-softsign">softsign</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sort">sort</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-space-to-depth">space-to-depth</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-spatial-transformer">spatial-transformer</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-split">split</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sqrt">sqrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-square">square</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-squeeze">squeeze</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-stack">stack</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-stop-gradient">stop-gradient</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sum">sum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-sum-axis">sum-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-svm-output">svm-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-swap-axis">swap-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-swapaxes">swapaxes</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-take">take</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-tan">tan</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-tanh">tanh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-tile">tile</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-to-json">to-json</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-topk">topk</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-tracing-enabled">tracing-enabled</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-transpose">transpose</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-trunc">trunc</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-uniform">uniform</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-unravel-index">unravel-index</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-up-sampling">up-sampling</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol.html#var-variable">variable</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-where">where</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-zeros">zeros</a> </li><li> <a href="org.apache.clojure-mxnet.symbol.html#var-zeros-like">zeros-like</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.symbol-api.html">org.apache.clojure-mxnet.symbol-api</a></h3><div class="doc"><pre class="plaintext">Experimental</pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--copy">-copy</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-det">-linalg-det</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-extractdiag">-linalg-extractdiag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-extracttrian">-linalg-extracttrian</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-gelqf">-linalg-gelqf</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-gemm">-linalg-gemm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-gemm2">-linalg-gemm2</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-inverse">-linalg-inverse</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-makediag">-linalg-makediag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-maketrian">-linalg-maketrian</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-potrf">-linalg-potrf</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-potri">-linalg-potri</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-slogdet">-linalg-slogdet</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-sumlogdiag">-linalg-sumlogdiag</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-syrk">-linalg-syrk</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-trmm">-linalg-trmm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-trsm">-linalg-trsm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--np-cumsum">-np-cumsum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--ravel-multi-index">-ravel-multi-index</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--shuffle">-shuffle</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--unravel-index">-unravel-index</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-abs">abs</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-activation">activation</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-adam-update">adam-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-add-n">add-n</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-all-finite">all-finite</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-amp-cast">amp-cast</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-amp-multicast">amp-multicast</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-arccos">arccos</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-arccosh">arccosh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-arcsin">arcsin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-arcsinh">arcsinh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-arctan">arctan</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-arctanh">arctanh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-argmax">argmax</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-argmax-channel">argmax-channel</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-argmin">argmin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-argsort">argsort</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-batch-dot">batch-dot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-batch-norm">batch-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-batch-norm-v1">batch-norm-v1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-batch-take">batch-take</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-bilinear-sampler">bilinear-sampler</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-block-grad">block-grad</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-add">broadcast-add</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-axis">broadcast-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-div">broadcast-div</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-equal">broadcast-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-greater">broadcast-greater</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-greater-equal">broadcast-greater-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-hypot">broadcast-hypot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-lesser">broadcast-lesser</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-lesser-equal">broadcast-lesser-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-like">broadcast-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-logical-and">broadcast-logical-and</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-logical-or">broadcast-logical-or</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-logical-xor">broadcast-logical-xor</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-maximum">broadcast-maximum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-minimum">broadcast-minimum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-mod">broadcast-mod</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-mul">broadcast-mul</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-not-equal">broadcast-not-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-power">broadcast-power</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-sub">broadcast-sub</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-to">broadcast-to</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-cast">cast</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-cast-storage">cast-storage</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-cbrt">cbrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-ceil">ceil</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-clip">clip</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-concat">concat</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-convolution">convolution</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-convolution-v1">convolution-v1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-correlation">correlation</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-cos">cos</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-cosh">cosh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-crop">crop</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-ctc-loss">ctc-loss</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-deconvolution">deconvolution</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-degrees">degrees</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-depth-to-space">depth-to-space</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-diag">diag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-dot">dot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-dropout">dropout</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-elemwise-add">elemwise-add</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-elemwise-div">elemwise-div</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-elemwise-mul">elemwise-mul</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-elemwise-sub">elemwise-sub</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-embedding">embedding</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-erf">erf</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-erfinv">erfinv</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-exp">exp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-expand-dims">expand-dims</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-expm1">expm1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-fill-element-0index">fill-element-0index</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-fix">fix</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-flatten">flatten</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-floor">floor</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-ftml-update">ftml-update</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-ftrl-update">ftrl-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-fully-connected">fully-connected</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-gamma">gamma</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-gammaln">gammaln</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-gather-nd">gather-nd</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-grid-generator">grid-generator</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-group-norm">group-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-hard-sigmoid">hard-sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-identity-attach-kl-sparse-reg">identity-attach-kl-sparse-reg</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-instance-norm">instance-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-khatri-rao">khatri-rao</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-l2-normalization">l2-normalization</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-layer-norm">layer-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-leaky-re-lu">leaky-re-lu</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-linear-regression-output">linear-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-log">log</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-log-softmax">log-softmax</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-log10">log10</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-log1p">log1p</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-log2">log2</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-logical-not">logical-not</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-logistic-regression-output">logistic-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-lrn">lrn</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-mae-regression-output">mae-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-make-loss">make-loss</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-max">max</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-mean">mean</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-min">min</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-moments">moments</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-mp-nag-mom-update">mp-nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-mp-sgd-mom-update">mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-mp-sgd-update">mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-multi-all-finite">multi-all-finite</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-multi-mp-sgd-mom-update">multi-mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-multi-mp-sgd-update">multi-mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-multi-sgd-mom-update">multi-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-multi-sgd-update">multi-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-nag-mom-update">nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-nanprod">nanprod</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-nansum">nansum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-negative">negative</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-norm">norm</a> </li><li> 
<a href="org.apache.clojure-mxnet.symbol-api.html#var-one-hot">one-hot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-ones-like">ones-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-pad">pad</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-pick">pick</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-pooling">pooling</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-pooling-v1">pooling-v1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-prod">prod</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-radians">radians</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-rcbrt">rcbrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-reciprocal">reciprocal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-relu">relu</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-repeat">repeat</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-reshape">reshape</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-reshape-like">reshape-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-reverse">reverse</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-rint">rint</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-rmsprop-update">rmsprop-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-rmspropalex-update">rmspropalex-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-rnn">rnn</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-roi-pooling">roi-pooling</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-round">round</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-rsqrt">rsqrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-scatter-nd">scatter-nd</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-sequence-last">sequence-last</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sequence-mask">sequence-mask</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sequence-reverse">sequence-reverse</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sgd-mom-update">sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sgd-update">sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-shape-array">shape-array</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sigmoid">sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sign">sign</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-signsgd-update">signsgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-signum-update">signum-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sin">sin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sinh">sinh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-size-array">size-array</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-slice">slice</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-slice-axis">slice-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-slice-channel">slice-channel</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-slice-like">slice-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-smooth-l1">smooth-l1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-softmax">softmax</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-softmax-activation">softmax-activation</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-softmax-cross-entropy">softmax-cross-entropy</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-softmax-output">softmax-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-softmin">softmin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-softsign">softsign</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sort">sort</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-space-to-depth">space-to-depth</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-spatial-transformer">spatial-transformer</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sqrt">sqrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-square">square</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-squeeze">squeeze</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-stack">stack</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sum">sum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-svm-output">svm-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-swap-axis">swap-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-take">take</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-tan">tan</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-tanh">tanh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-tile">tile</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-topk">topk</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-transpose">transpose</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-trunc">trunc</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-up-sampling">up-sampling</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-where">where</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-zeros-like">zeros-like</a> </li></ul></div></div><div 
class="namespace"><h3><a href="org.apache.clojure-mxnet.symbol-api.html">org.apache.clojure-mxnet.symbol-api</a></h3><div class="doc"><pre class="plaintext">Experimental</pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--copy">-copy</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-det">-linalg-det</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-extractdiag">-linalg-extractdiag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-extracttrian">-linalg-extracttrian</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-gelqf">-linalg-gelqf</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-gemm">-linalg-gemm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-gemm2">-linalg-gemm2</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-inverse">-linalg-inverse</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-makediag">-linalg-makediag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-maketrian">-linalg-maketrian</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-potrf">-linalg-potrf</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-potri">-linalg-potri</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-slogdet">-linalg-slogdet</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-sumlogdiag">-linalg-sumlogdiag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-syrk">-linalg-syrk</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-trmm">-linalg-trmm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--linalg-trsm">-linalg-trsm</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var--np-cumsum">-np-cumsum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--ravel-multi-index">-ravel-multi-index</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--shuffle">-shuffle</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var--unravel-index">-unravel-index</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-abs">abs</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-activation">activation</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-adam-update">adam-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-add-n">add-n</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-all-finite">all-finite</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-amp-cast">amp-cast</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-amp-multicast">amp-multicast</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-arccos">arccos</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-arccosh">arccosh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-arcsin">arcsin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-arcsinh">arcsinh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-arctan">arctan</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-arctanh">arctanh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-argmax">argmax</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-argmax-channel">argmax-channel</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-argmin">argmin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-argsort">argsort</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-batch-dot">batch-dot</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-batch-norm">batch-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-batch-norm-v1">batch-norm-v1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-batch-take">batch-take</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-bilinear-sampler">bilinear-sampler</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-block-grad">block-grad</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-add">broadcast-add</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-axis">broadcast-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-div">broadcast-div</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-equal">broadcast-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-greater">broadcast-greater</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-greater-equal">broadcast-greater-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-hypot">broadcast-hypot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-lesser">broadcast-lesser</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-lesser-equal">broadcast-lesser-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-like">broadcast-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-logical-and">broadcast-logical-and</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-logical-or">broadcast-logical-or</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-logical-xor">broadcast-logical-xor</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-maximum">broadcast-maximum</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-minimum">broadcast-minimum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-mod">broadcast-mod</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-mul">broadcast-mul</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-not-equal">broadcast-not-equal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-power">broadcast-power</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-sub">broadcast-sub</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-broadcast-to">broadcast-to</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-cast">cast</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-cast-storage">cast-storage</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-cbrt">cbrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-ceil">ceil</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-clip">clip</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-concat">concat</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-convolution">convolution</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-convolution-v1">convolution-v1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-correlation">correlation</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-cos">cos</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-cosh">cosh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-crop">crop</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-ctc-loss">ctc-loss</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-deconvolution">deconvolution</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-degrees">degrees</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-depth-to-space">depth-to-space</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-diag">diag</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-dot">dot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-dropout">dropout</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-elemwise-add">elemwise-add</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-elemwise-div">elemwise-div</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-elemwise-mul">elemwise-mul</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-elemwise-sub">elemwise-sub</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-embedding">embedding</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-erf">erf</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-erfinv">erfinv</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-exp">exp</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-expand-dims">expand-dims</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-expm1">expm1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-fill-element-0index">fill-element-0index</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-fix">fix</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-flatten">flatten</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-floor">floor</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-ftml-update">ftml-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-ftrl-update">ftrl-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-fully-connected">fully-connected</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-gamma">gamma</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-gammaln">gammaln</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-gather-nd">gather-nd</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-grid-generator">grid-generator</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-group-norm">group-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-hard-sigmoid">hard-sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-identity-attach-kl-sparse-reg">identity-attach-kl-sparse-reg</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-instance-norm">instance-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-khatri-rao">khatri-rao</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-l2-normalization">l2-normalization</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-layer-norm">layer-norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-leaky-re-lu">leaky-re-lu</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-linear-regression-output">linear-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-log">log</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-log-softmax">log-softmax</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-log10">log10</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-log1p">log1p</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-log2">log2</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-logical-not">logical-not</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-logistic-regression-output">logistic-regression-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-lrn">lrn</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-mae-regression-output">mae-regression-output</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-make-loss">make-loss</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-max">max</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-mean">mean</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-min">min</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-moments">moments</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-mp-nag-mom-update">mp-nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-mp-sgd-mom-update">mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-mp-sgd-update">mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-multi-all-finite">multi-all-finite</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-multi-mp-sgd-mom-update">multi-mp-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-multi-mp-sgd-update">multi-mp-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-multi-sgd-mom-update">multi-sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-multi-sgd-update">multi-sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-nag-mom-update">nag-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-nanprod">nanprod</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-nansum">nansum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-negative">negative</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-norm">norm</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-one-hot">one-hot</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-ones-like">ones-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-pad">pad</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-pick">pick</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-pooling">pooling</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-pooling-v1">pooling-v1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-prod">prod</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-radians">radians</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-rcbrt">rcbrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-reciprocal">reciprocal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-relu">relu</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-repeat">repeat</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-reshape">reshape</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-reshape-like">reshape-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-reverse">reverse</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-rint">rint</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-rmsprop-update">rmsprop-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-rmspropalex-update">rmspropalex-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-rnn">rnn</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-roi-pooling">roi-pooling</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-round">round</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-rsqrt">rsqrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-scatter-nd">scatter-nd</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sequence-last">sequence-last</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sequence-mask">sequence-mask</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-sequence-reverse">sequence-reverse</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sgd-mom-update">sgd-mom-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sgd-update">sgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-shape-array">shape-array</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sigmoid">sigmoid</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sign">sign</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-signsgd-update">signsgd-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-signum-update">signum-update</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sin">sin</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sinh">sinh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-size-array">size-array</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-slice">slice</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-slice-axis">slice-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-slice-channel">slice-channel</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-slice-like">slice-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-smooth-l1">smooth-l1</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-softmax">softmax</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-softmax-activation">softmax-activation</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-softmax-cross-entropy">softmax-cross-entropy</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-softmax-output">softmax-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-softmin">softmin</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-api.html#var-softsign">softsign</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sort">sort</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-space-to-depth">space-to-depth</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-spatial-transformer">spatial-transformer</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sqrt">sqrt</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-square">square</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-squeeze">squeeze</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-stack">stack</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-sum">sum</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-svm-output">svm-output</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-swap-axis">swap-axis</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-take">take</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-tan">tan</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-tanh">tanh</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-tile">tile</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-topk">topk</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-transpose">transpose</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-trunc">trunc</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-up-sampling">up-sampling</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-where">where</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-api.html#var-zeros-like">zeros-like</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.symbol-random-api.html">org.apache.clojure-mxnet.symbol-random-api</a></h3><div class="doc"><pre 
class="plaintext">Experimental</pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-exponential">exponential</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-exponential-like">exponential-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-gamma">gamma</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-gamma-like">gamma-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-generalized-negative-binomial">generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-generalized-negative-binomial-like">generalized-negative-binomial-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-multinomial-like">multinomial-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-negative-binomial">negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-negative-binomial-like">negative-binomial-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-normal">normal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-normal-like">normal-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-dirichlet">pdf-dirichlet</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-exponential">pdf-exponential</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-gamma">pdf-gamma</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-generalized-negative-binomial">pdf-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-negative-binomial">pdf-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-normal">pdf-normal</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-poisson">pdf-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-uniform">pdf-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-poisson">poisson</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-poisson-like">poisson-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-randint">randint</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-uniform">uniform</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-uniform-like">uniform-like</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.symbol-random-api.html">org.apache.clojure-mxnet.symbol-random-api</a></h3><div class="doc"><pre class="plaintext">Experimental</pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-exponential">exponential</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-exponential-like">exponential-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-gamma">gamma</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-gamma-like">gamma-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-generalized-negative-binomial">generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-generalized-negative-binomial-like">generalized-negative-binomial-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-multinomial-like">multinomial-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-negative-binomial">negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-negative-binomial-like">negative-binomial-like</a> </li><li> <a 
href="org.apache.clojure-mxnet.symbol-random-api.html#var-normal">normal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-normal-like">normal-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-dirichlet">pdf-dirichlet</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-exponential">pdf-exponential</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-gamma">pdf-gamma</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-generalized-negative-binomial">pdf-generalized-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-negative-binomial">pdf-negative-binomial</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-normal">pdf-normal</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-poisson">pdf-poisson</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-pdf-uniform">pdf-uniform</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-poisson">poisson</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-poisson-like">poisson-like</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-randint">randint</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-uniform">uniform</a> </li><li> <a href="org.apache.clojure-mxnet.symbol-random-api.html#var-uniform-like">uniform-like</a> </li></ul></div></div><div class="namespace"><h3><a href="org.apache.clojure-mxnet.util.html">org.apache.clojure-mxnet.util</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.util.html#var--.3Eint-option">-&gt;int-option</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var--.3Eoption">-&gt;option</a> </li><li> <a 
href="org.apache.clojure-mxnet.util.html#var-apply-scala-fn">apply-scala-fn</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-buffer-.3Evec">buffer-&gt;vec</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-coerce-param">coerce-param</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-coerce-return">coerce-return</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-coerce-return-recursive">coerce-return-recursive</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-convert-by-shape">convert-by-shape</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-convert-io-map">convert-io-map</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-convert-map">convert-map</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-convert-shape-map">convert-shape-map</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-convert-symbol-map">convert-symbol-map</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-convert-tuple">convert-tuple</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-convert-vector">convert-vector</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-empty-indexed-seq">empty-indexed-seq</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-empty-list">empty-list</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-empty-list-map">empty-list-map</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-empty-map">empty-map</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-forms-.3Escala-fn">forms-&gt;scala-fn</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-io-convert-by-param-name">io-convert-by-param-name</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-io-param-names">io-param-names</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-keyword-.3Esnake-case">keyword-&gt;snake-case</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-list-map">list-map</a> </li><li> <a 
href="org.apache.clojure-mxnet.util.html#var-map-.3Escala-tuple-seq">map-&gt;scala-tuple-seq</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-map-.3Etuple">map-&gt;tuple</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-nd-seq-shape">nd-seq-shape</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-ndarray-param-coerce">ndarray-param-coerce</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-nil-or-coerce-param">nil-or-coerce-param</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-option-.3Evalue">option-&gt;value</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-scala-fn">scala-fn</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-scala-iterator-.3Eseq">scala-iterator-&gt;seq</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-scala-map-.3Emap">scala-map-&gt;map</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-scala-vector-.3Evec">scala-vector-&gt;vec</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-symbol-param-coerce">symbol-param-coerce</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-to-array-nd">to-array-nd</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-translate-keyword-shape">translate-keyword-shape</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-tuple-.3Evec">tuple-&gt;vec</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-tuple-convert-by-param-name">tuple-convert-by-param-name</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-tuple-param-names">tuple-param-names</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-validate.21">validate!</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-vec-.3Eindexed-seq">vec-&gt;indexed-seq</a> </li><li> <a href="org.apache.clojure-mxnet.util.html#var-vec-.3Eset">vec-&gt;set</a> </li></ul></div></div><div class="namespace"><h3><a 
href="org.apache.clojure-mxnet.visualization.html">org.apache.clojure-mxnet.visualization</a></h3><div class="doc"><pre class="plaintext"></pre></div><div class="index"><p>Public variables and functions:</p><ul><li> <a href="org.apache.clojure-mxnet.visualization.html#var-plot-network">plot-network</a> </li><li> <a href="org.apache.clojure-mxnet.visualization.html#var-render">render</a> </li></ul></div></div></div></body></html>
\ No newline at end of file
diff --git a/api/clojure/docs/api/org.apache.clojure-mxnet.ndarray-api.html b/api/clojure/docs/api/org.apache.clojure-mxnet.ndarray-api.html
deleted file mode 100644
index 2535620..0000000
--- a/api/clojure/docs/api/org.apache.clojure-mxnet.ndarray-api.html
+++ /dev/null
@@ -1,5887 +0,0 @@
-<!DOCTYPE html PUBLIC ""
-    "">
-<html><head><meta charset="UTF-8" /><title>org.apache.clojure-mxnet.ndarray-api documentation</title><link rel="stylesheet" type="text/css" href="css/default.css" /><link rel="stylesheet" type="text/css" href="css/highlight.css" /><script type="text/javascript" src="js/highlight.min.js"></script><script type="text/javascript" src="js/jquery.min.js"></script><script type="text/javascript" src="js/page_effects.js"></script><script>hljs.initHighlightingOnLoad();</script></head><body><div id="header"><h2>Generated by <a href="https://github.com/weavejester/codox">Codox</a></h2><h1><a href="index.html"><span class="project-title"><span class="project-name">Clojure-mxnet</span> <span class="project-version">1.6.0-SNAPSHOT</span></span></a></h1></div><div class="sidebar primary"><h3 class="no-link"><span class="inner">Project</span></h3><ul class="index-link"><li class="depth-1 "><a href="index.html"><div class="inner">Index</div></a></li></ul><h3 class="no-link"><span class="inner">Namespaces</span></h3><ul><li class="depth-1"><div class="no-link"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>org</span></div></div></li><li class="depth-2"><div class="no-link"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>apache</span></div></div></li><li class="depth-3"><div class="no-link"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>clojure-mxnet</span></div></div></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.base.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>base</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.callback.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>callback</span></div></a></li><li class="depth-4 branch"><a 
href="org.apache.clojure-mxnet.context.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>context</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.dtype.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>dtype</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.eval-metric.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>eval-metric</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.executor.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>executor</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.image.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>image</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.infer.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>infer</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.initializer.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>initializer</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.io.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>io</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.kvstore.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>kvstore</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.kvstore-server.html"><div class="inner"><span class="tree"><span class="top"></span><span 
class="bottom"></span></span><span>kvstore-server</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.layout.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>layout</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.lr-scheduler.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>lr-scheduler</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.module.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>module</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.monitor.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>monitor</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.ndarray.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>ndarray</span></div></a></li><li class="depth-4 branch current"><a href="org.apache.clojure-mxnet.ndarray-api.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>ndarray-api</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.ndarray-random-api.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>ndarray-random-api</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.optimizer.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>optimizer</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.primitives.html"><div class="inner"><span class="tree"><span class="top"></span><span 
class="bottom"></span></span><span>primitives</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.profiler.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>profiler</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.random.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>random</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.resource-scope.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>resource-scope</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.shape.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>shape</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.symbol.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>symbol</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.symbol-api.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>symbol-api</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.symbol-random-api.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>symbol-random-api</span></div></a></li><li class="depth-4 branch"><a href="org.apache.clojure-mxnet.util.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>util</span></div></a></li><li class="depth-4"><a href="org.apache.clojure-mxnet.visualization.html"><div class="inner"><span class="tree"><span class="top"></span><span class="bottom"></span></span><span>visualization</span></div></a></li></ul></div><div 
class="sidebar secondary"><h3><a href="#top"><span class="inner">Public Vars</span></a></h3><ul><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--copy"><div class="inner"><span>-copy</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-det"><div class="inner"><span>-linalg-det</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-extractdiag"><div class="inner"><span>-linalg-extractdiag</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-extracttrian"><div class="inner"><span>-linalg-extracttrian</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-gelqf"><div class="inner"><span>-linalg-gelqf</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-gemm"><div class="inner"><span>-linalg-gemm</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-gemm2"><div class="inner"><span>-linalg-gemm2</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-inverse"><div class="inner"><span>-linalg-inverse</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-makediag"><div class="inner"><span>-linalg-makediag</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-maketrian"><div class="inner"><span>-linalg-maketrian</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-potrf"><div class="inner"><span>-linalg-potrf</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-potri"><div class="inner"><span>-linalg-potri</span></div></a></li><li class="depth-1"><a 
href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-slogdet"><div class="inner"><span>-linalg-slogdet</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-sumlogdiag"><div class="inner"><span>-linalg-sumlogdiag</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-syrk"><div class="inner"><span>-linalg-syrk</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-trmm"><div class="inner"><span>-linalg-trmm</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--linalg-trsm"><div class="inner"><span>-linalg-trsm</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--np-cumsum"><div class="inner"><span>-np-cumsum</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--ravel-multi-index"><div class="inner"><span>-ravel-multi-index</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--shuffle"><div class="inner"><span>-shuffle</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var--unravel-index"><div class="inner"><span>-unravel-index</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-abs"><div class="inner"><span>abs</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-activation"><div class="inner"><span>activation</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-adam-update"><div class="inner"><span>adam-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-add-n"><div class="inner"><span>add-n</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-all-finite"><div 
class="inner"><span>all-finite</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-amp-cast"><div class="inner"><span>amp-cast</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-amp-multicast"><div class="inner"><span>amp-multicast</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-arccos"><div class="inner"><span>arccos</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-arccosh"><div class="inner"><span>arccosh</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-arcsin"><div class="inner"><span>arcsin</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-arcsinh"><div class="inner"><span>arcsinh</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-arctan"><div class="inner"><span>arctan</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-arctanh"><div class="inner"><span>arctanh</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-argmax"><div class="inner"><span>argmax</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-argmax-channel"><div class="inner"><span>argmax-channel</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-argmin"><div class="inner"><span>argmin</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-argsort"><div class="inner"><span>argsort</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-batch-dot"><div class="inner"><span>batch-dot</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-batch-norm"><div 
class="inner"><span>batch-norm</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-batch-norm-v1"><div class="inner"><span>batch-norm-v1</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-batch-take"><div class="inner"><span>batch-take</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-bilinear-sampler"><div class="inner"><span>bilinear-sampler</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-block-grad"><div class="inner"><span>block-grad</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-add"><div class="inner"><span>broadcast-add</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-axis"><div class="inner"><span>broadcast-axis</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-div"><div class="inner"><span>broadcast-div</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-equal"><div class="inner"><span>broadcast-equal</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-greater"><div class="inner"><span>broadcast-greater</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-greater-equal"><div class="inner"><span>broadcast-greater-equal</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-hypot"><div class="inner"><span>broadcast-hypot</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-lesser"><div class="inner"><span>broadcast-lesser</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-lesser-equal"><div 
class="inner"><span>broadcast-lesser-equal</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-like"><div class="inner"><span>broadcast-like</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-logical-and"><div class="inner"><span>broadcast-logical-and</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-logical-or"><div class="inner"><span>broadcast-logical-or</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-logical-xor"><div class="inner"><span>broadcast-logical-xor</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-maximum"><div class="inner"><span>broadcast-maximum</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-minimum"><div class="inner"><span>broadcast-minimum</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-mod"><div class="inner"><span>broadcast-mod</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-mul"><div class="inner"><span>broadcast-mul</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-not-equal"><div class="inner"><span>broadcast-not-equal</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-power"><div class="inner"><span>broadcast-power</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-sub"><div class="inner"><span>broadcast-sub</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-broadcast-to"><div class="inner"><span>broadcast-to</span></div></a></li><li class="depth-1"><a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-cast"><div class="inner"><span>cast</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-cast-storage"><div class="inner"><span>cast-storage</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-cbrt"><div class="inner"><span>cbrt</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-ceil"><div class="inner"><span>ceil</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-clip"><div class="inner"><span>clip</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-concat"><div class="inner"><span>concat</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-convolution"><div class="inner"><span>convolution</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-convolution-v1"><div class="inner"><span>convolution-v1</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-correlation"><div class="inner"><span>correlation</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-cos"><div class="inner"><span>cos</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-cosh"><div class="inner"><span>cosh</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-crop"><div class="inner"><span>crop</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-ctc-loss"><div class="inner"><span>ctc-loss</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-deconvolution"><div class="inner"><span>deconvolution</span></div></a></li><li class="depth-1"><a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-degrees"><div class="inner"><span>degrees</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-depth-to-space"><div class="inner"><span>depth-to-space</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-diag"><div class="inner"><span>diag</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-dot"><div class="inner"><span>dot</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-dropout"><div class="inner"><span>dropout</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-elemwise-add"><div class="inner"><span>elemwise-add</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-elemwise-div"><div class="inner"><span>elemwise-div</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-elemwise-mul"><div class="inner"><span>elemwise-mul</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-elemwise-sub"><div class="inner"><span>elemwise-sub</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-embedding"><div class="inner"><span>embedding</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-erf"><div class="inner"><span>erf</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-erfinv"><div class="inner"><span>erfinv</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-exp"><div class="inner"><span>exp</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-expand-dims"><div class="inner"><span>expand-dims</span></div></a></li><li class="depth-1"><a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-expm1"><div class="inner"><span>expm1</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-fill-element-0index"><div class="inner"><span>fill-element-0index</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-fix"><div class="inner"><span>fix</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-flatten"><div class="inner"><span>flatten</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-floor"><div class="inner"><span>floor</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-ftml-update"><div class="inner"><span>ftml-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-ftrl-update"><div class="inner"><span>ftrl-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-fully-connected"><div class="inner"><span>fully-connected</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-gamma"><div class="inner"><span>gamma</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-gammaln"><div class="inner"><span>gammaln</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-gather-nd"><div class="inner"><span>gather-nd</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-grid-generator"><div class="inner"><span>grid-generator</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-group-norm"><div class="inner"><span>group-norm</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-hard-sigmoid"><div class="inner"><span>hard-sigmoid</span></div></a></li><li 
class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-identity-attach-kl-sparse-reg"><div class="inner"><span>identity-attach-kl-sparse-reg</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-instance-norm"><div class="inner"><span>instance-norm</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-khatri-rao"><div class="inner"><span>khatri-rao</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-l2-normalization"><div class="inner"><span>l2-normalization</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-layer-norm"><div class="inner"><span>layer-norm</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-leaky-re-lu"><div class="inner"><span>leaky-re-lu</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-linear-regression-output"><div class="inner"><span>linear-regression-output</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-log"><div class="inner"><span>log</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-log-softmax"><div class="inner"><span>log-softmax</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-log10"><div class="inner"><span>log10</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-log1p"><div class="inner"><span>log1p</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-log2"><div class="inner"><span>log2</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-logical-not"><div class="inner"><span>logical-not</span></div></a></li><li class="depth-1"><a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-logistic-regression-output"><div class="inner"><span>logistic-regression-output</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-lrn"><div class="inner"><span>lrn</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-mae-regression-output"><div class="inner"><span>mae-regression-output</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-make-loss"><div class="inner"><span>make-loss</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-max"><div class="inner"><span>max</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-mean"><div class="inner"><span>mean</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-min"><div class="inner"><span>min</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-moments"><div class="inner"><span>moments</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-mp-nag-mom-update"><div class="inner"><span>mp-nag-mom-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-mp-sgd-mom-update"><div class="inner"><span>mp-sgd-mom-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-mp-sgd-update"><div class="inner"><span>mp-sgd-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-multi-all-finite"><div class="inner"><span>multi-all-finite</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-multi-mp-sgd-mom-update"><div class="inner"><span>multi-mp-sgd-mom-update</span></div></a></li><li class="depth-1"><a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-multi-mp-sgd-update"><div class="inner"><span>multi-mp-sgd-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-multi-sgd-mom-update"><div class="inner"><span>multi-sgd-mom-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-multi-sgd-update"><div class="inner"><span>multi-sgd-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-nag-mom-update"><div class="inner"><span>nag-mom-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-nanprod"><div class="inner"><span>nanprod</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-nansum"><div class="inner"><span>nansum</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-negative"><div class="inner"><span>negative</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-norm"><div class="inner"><span>norm</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-one-hot"><div class="inner"><span>one-hot</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-ones-like"><div class="inner"><span>ones-like</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-pad"><div class="inner"><span>pad</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-pick"><div class="inner"><span>pick</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-pooling"><div class="inner"><span>pooling</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-pooling-v1"><div class="inner"><span>pooling-v1</span></div></a></li><li 
class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-prod"><div class="inner"><span>prod</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-radians"><div class="inner"><span>radians</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-rcbrt"><div class="inner"><span>rcbrt</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-reciprocal"><div class="inner"><span>reciprocal</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-relu"><div class="inner"><span>relu</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-repeat"><div class="inner"><span>repeat</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-reshape"><div class="inner"><span>reshape</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-reshape-like"><div class="inner"><span>reshape-like</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-reverse"><div class="inner"><span>reverse</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-rint"><div class="inner"><span>rint</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-rmsprop-update"><div class="inner"><span>rmsprop-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-rmspropalex-update"><div class="inner"><span>rmspropalex-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-rnn"><div class="inner"><span>rnn</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-roi-pooling"><div class="inner"><span>roi-pooling</span></div></a></li><li class="depth-1"><a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-round"><div class="inner"><span>round</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-rsqrt"><div class="inner"><span>rsqrt</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-scatter-nd"><div class="inner"><span>scatter-nd</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-sequence-last"><div class="inner"><span>sequence-last</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-sequence-mask"><div class="inner"><span>sequence-mask</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-sequence-reverse"><div class="inner"><span>sequence-reverse</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-sgd-mom-update"><div class="inner"><span>sgd-mom-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-sgd-update"><div class="inner"><span>sgd-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-shape-array"><div class="inner"><span>shape-array</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-sigmoid"><div class="inner"><span>sigmoid</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-sign"><div class="inner"><span>sign</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-signsgd-update"><div class="inner"><span>signsgd-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-signum-update"><div class="inner"><span>signum-update</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-sin"><div class="inner"><span>sin</span></div></a></li><li 
class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-sinh"><div class="inner"><span>sinh</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-size-array"><div class="inner"><span>size-array</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-slice"><div class="inner"><span>slice</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-slice-axis"><div class="inner"><span>slice-axis</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-slice-channel"><div class="inner"><span>slice-channel</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-slice-like"><div class="inner"><span>slice-like</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-smooth-l1"><div class="inner"><span>smooth-l1</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-softmax"><div class="inner"><span>softmax</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-softmax-activation"><div class="inner"><span>softmax-activation</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-softmax-cross-entropy"><div class="inner"><span>softmax-cross-entropy</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-softmax-output"><div class="inner"><span>softmax-output</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-softmin"><div class="inner"><span>softmin</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-softsign"><div class="inner"><span>softsign</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-sort"><div 
class="inner"><span>sort</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-space-to-depth"><div class="inner"><span>space-to-depth</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-spatial-transformer"><div class="inner"><span>spatial-transformer</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-sqrt"><div class="inner"><span>sqrt</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-square"><div class="inner"><span>square</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-squeeze"><div class="inner"><span>squeeze</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-stack"><div class="inner"><span>stack</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-sum"><div class="inner"><span>sum</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-svm-output"><div class="inner"><span>svm-output</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-swap-axis"><div class="inner"><span>swap-axis</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-take"><div class="inner"><span>take</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-tan"><div class="inner"><span>tan</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-tanh"><div class="inner"><span>tanh</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-tile"><div class="inner"><span>tile</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-topk"><div class="inner"><span>topk</span></div></a></li><li class="depth-1"><a 
href="org.apache.clojure-mxnet.ndarray-api.html#var-transpose"><div class="inner"><span>transpose</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-trunc"><div class="inner"><span>trunc</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-up-sampling"><div class="inner"><span>up-sampling</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-where"><div class="inner"><span>where</span></div></a></li><li class="depth-1"><a href="org.apache.clojure-mxnet.ndarray-api.html#var-zeros-like"><div class="inner"><span>zeros-like</span></div></a></li></ul></div><div class="namespace-docs" id="content"><h1 class="anchor" id="top">org.apache.clojure-mxnet.ndarray-api</h1><div class="doc"><pre class="plaintext">Experimental
-</pre></div><div class="public anchor" id="var--copy"><h3>-copy</h3><div class="usage"><code>(-copy {:keys [data out], :or {out nil}, :as opts})</code></div><div class="doc"><pre class="plaintext">Returns a copy of the input.
-
-From:src/operator/tensor/elemwise_unary_op_basic.cc:246
-
-`data`: The input array.
-`out`: Output array. (optional)</pre></div></div><div class="public anchor" id="var--linalg-det"><h3>-linalg-det</h3><div class="usage"><code>(-linalg-det {:keys [a out], :or {out nil}, :as opts})</code></div><div class="doc"><pre class="plaintext">Compute the determinant of a matrix.
-Input is a tensor *A* of dimension *n &gt;= 2*.
-
-If *n=2*, *A* is a square matrix. We compute:
-
-  *out* = *det(A)*
-
-If *n&gt;2*, *det* is performed separately on the trailing two dimensions
-for all inputs (batch mode).
-
-.. note:: The operator supports float32 and float64 data types only.
-.. note:: There is no gradient backwarded when A is non-invertible (which is
-          equivalent to det(A) = 0) because zero is rarely hit upon in float
-          point computation and the Jacobi's formula on determinant gradient
-          is not computationally efficient when A is non-invertible.
-
-Examples::
-
-   Single matrix determinant
-   A = [[1., 4.], [2., 3.]]
-   det(A) = [-5.]
-
-   Batch matrix determinant
-   A = [[[1., 4.], [2., 3.]],
-        [[2., 3.], [1., 4.]]]
-   det(A) = [-5., 5.]
-
-
-Defined in src/operator/tensor/la_op.cc:L970
-
-`a`: Tensor of square matrix
-`out`: Output array. (optional)</pre></div></div><div class="public anchor" id="var--linalg-extractdiag"><h3>-linalg-extractdiag</h3><div class="usage"><code>(-linalg-extractdiag {:keys [a offset out], :or {offset nil, out nil}, :as opts})</code></div><div class="doc"><pre class="plaintext">Extracts the diagonal entries of a square matrix.
-Input is a tensor *A* of dimension *n &gt;= 2*.
-
-If *n=2*, then *A* represents a single square matrix which diagonal elements get extracted as a 1-dimensional tensor.
-
-If *n&gt;2*, then *A* represents a batch of square matrices on the trailing two dimensions. The extracted diagonals are returned as an *n-1*-dimensional tensor.
-
-.. note:: The operator supports float32 and float64 data types only.
-
-Examples::
-
-    Single matrix diagonal extraction
-    A = [[1.0, 2.0],
-         [3.0, 4.0]]
-
-    extractdiag(A) = [1.0, 4.0]
-
-    extractdiag(A, 1) = [2.0]
-
-    Batch matrix diagonal extraction
-    A = [[[1.0, 2.0],
-          [3.0, 4.0]],
-         [[5.0, 6.0],
-          [7.0, 8.0]]]
-
-    extractdiag(A) = [[1.0, 4.0],
-                      [5.0, 8.0]]
-
-
-Defined in src/operator/tensor/la_op.cc:L495
-
-`a`: Tensor of square matrices
-`offset`: Offset of the diagonal versus the main diagonal. 0 corresponds to the main diagonal, a negative/positive value to diagonals below/above the main diagonal. (optional)
-`out`: Output array. (optional)</pre></div></div><div class="public anchor" id="var--linalg-extracttrian"><h3>-linalg-extracttrian</h3><div class="usage"><code>(-linalg-extracttrian {:keys [a offset lower out], :or {offset nil, lower nil, out nil}, :as opts})</code></div><div class="doc"><pre class="plaintext">Extracts a triangular sub-matrix from a square matrix.
-Input is a tensor *A* of dimension *n &gt;= 2*.
-
-If *n=2*, then *A* represents a single square matrix from which a triangular sub-matrix is extracted as a 1-dimensional tensor.
-
-If *n&gt;2*, then *A* represents a batch of square matrices on the trailing two dimensions. The extracted triangular sub-matrices are returned as an *n-1*-dimensional tensor.
-
-The *offset* and *lower* parameters determine the triangle to be extracted:
-
-- When *offset = 0* either the lower or upper triangle with respect to the main diagonal is extracted depending on the value of parameter *lower*.
-- When *offset = k &gt; 0* the upper triangle with respect to the k-th diagonal above the main diagonal is extracted. 
-- When *offset = k &lt; 0* the lower triangle with respect to the k-th diagonal below the main diagonal is extracted. 
-
-.. note:: The operator supports float32 and float64 data types only.
-
-Examples::
-
-    Single triagonal extraction
-    A = [[1.0, 2.0],
-         [3.0, 4.0]]
-
-    extracttrian(A) = [1.0, 3.0, 4.0]
-    extracttrian(A, lower=False) = [1.0, 2.0, 4.0]
-    extracttrian(A, 1) = [2.0]
-    extracttrian(A, -1) = [3.0]
-
-    Batch triagonal extraction
-    A = [[[1.0, 2.0],
-          [3.0, 4.0]],
-         [[5.0, 6.0],
-          [7.0, 8.0]]]
-
-    extracttrian(A) = [[1.0, 3.0, 4.0],
-                       [5.0, 7.0, 8.0]]
-
-
-Defined in src/operator/tensor/la_op.cc:L605
-
-`a`: Tensor of square matrices
-`offset`: Offset of the diagonal versus the main diagonal. 0 corresponds to the main diagonal, a negative/positive value to diagonals below/above the main diagonal. (optional)
-`lower`: Refer to the lower triangular matrix if lower=true, refer to the upper otherwise. Only relevant when offset=0 (optional)
-`out`: Output array. (optional)</pre></div></div><div class="public anchor" id="var--linalg-gelqf"><h3>-linalg-gelqf</h3><div class="usage"><code>(-linalg-gelqf {:keys [a out], :or {out nil}, :as opts})</code></div><div class="doc"><pre class="plaintext">LQ factorization for general matrix.
-Input is a tensor *A* of dimension *n &gt;= 2*.
-
-If *n=2*, we compute the LQ factorization (LAPACK *gelqf*, followed by *orglq*). *A*
-must have shape *(x, y)* with *x &lt;= y*, and must have full rank *=x*. The LQ
-factorization consists of *L* with shape *(x, x)* and *Q* with shape *(x, y)*, so
-that:
-
-   *A* = *L* \* *Q*
-
-Here, *L* is lower triangular (upper triangle equal to zero) with nonzero diagonal,
-and *Q* is row-orthonormal, meaning that
-
-   *Q* \* *Q*\ :sup:`T`
-
-is equal to the identity ma