<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta charset="utf-8" />
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<style>
.dropdown {
position: relative;
display: inline-block;
}
.dropdown-content {
display: none;
position: absolute;
background-color: #f9f9f9;
min-width: 160px;
box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2);
padding: 12px 16px;
z-index: 1;
text-align: left;
}
.dropdown:hover .dropdown-content {
display: block;
}
.dropdown-option:hover {
color: #FF4500;
}
.dropdown-option-active {
color: #FF4500;
font-weight: lighter;
}
.dropdown-option {
color: #000000;
font-weight: lighter;
}
.dropdown-header {
color: #FFFFFF;
display: inline-flex;
}
.dropdown-caret {
width: 18px;
height: 54px;
}
.dropdown-caret-path {
fill: #FFFFFF;
}
</style>
<title>Loss functions &#8212; Apache MXNet documentation</title>
<link rel="stylesheet" href="../../../../_static/basic.css" type="text/css" />
<link rel="stylesheet" href="../../../../_static/pygments.css" type="text/css" />
<link rel="stylesheet" type="text/css" href="../../../../_static/mxnet.css" />
<link rel="stylesheet" href="../../../../_static/material-design-lite-1.3.0/material.blue-deep_orange.min.css" type="text/css" />
<link rel="stylesheet" href="../../../../_static/sphinx_materialdesign_theme.css" type="text/css" />
<link rel="stylesheet" href="../../../../_static/fontawesome/all.css" type="text/css" />
<link rel="stylesheet" href="../../../../_static/fonts.css" type="text/css" />
<link rel="stylesheet" href="../../../../_static/feedback.css" type="text/css" />
<script id="documentation_options" data-url_root="../../../../" src="../../../../_static/documentation_options.js"></script>
<script src="../../../../_static/jquery.js"></script>
<script src="../../../../_static/underscore.js"></script>
<script src="../../../../_static/doctools.js"></script>
<script src="../../../../_static/language_data.js"></script>
<script src="../../../../_static/matomo_analytics.js"></script>
<script src="../../../../_static/autodoc.js"></script>
<script crossorigin="anonymous" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"></script>
<script async="async" src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/latest.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script type="text/x-mathjax-config">MathJax.Hub.Config({"tex2jax": {"inlineMath": [["$", "$"], ["\\(", "\\)"]], "processEscapes": true, "ignoreClass": "document", "processClass": "math|output_area"}})</script>
<script src="../../../../_static/sphinx_materialdesign_theme.js"></script>
<link rel="shortcut icon" href="../../../../_static/mxnet-icon.png"/>
<link rel="index" title="Index" href="../../../../genindex.html" />
<link rel="search" title="Search" href="../../../../search.html" />
<link rel="next" title="Text Tutorials" href="../text/index.html" />
<link rel="prev" title="Kullback-Leibler (KL) Divergence" href="kl_divergence.html" />
</head>
<body><header class="site-header" role="banner">
<div class="wrapper">
<a class="site-title" rel="author" href="/"><img
src="../../../../_static/mxnet_logo.png" class="site-header-logo"></a>
<nav class="site-nav">
<input type="checkbox" id="nav-trigger" class="nav-trigger"/>
<label for="nav-trigger">
<span class="menu-icon">
<svg viewBox="0 0 18 15" width="18px" height="15px">
<path d="M18,1.484c0,0.82-0.665,1.484-1.484,1.484H1.484C0.665,2.969,0,2.304,0,1.484l0,0C0,0.665,0.665,0,1.484,0 h15.032C17.335,0,18,0.665,18,1.484L18,1.484z M18,7.516C18,8.335,17.335,9,16.516,9H1.484C0.665,9,0,8.335,0,7.516l0,0 c0-0.82,0.665-1.484,1.484-1.484h15.032C17.335,6.031,18,6.696,18,7.516L18,7.516z M18,13.516C18,14.335,17.335,15,16.516,15H1.484 C0.665,15,0,14.335,0,13.516l0,0c0-0.82,0.665-1.483,1.484-1.483h15.032C17.335,12.031,18,12.695,18,13.516L18,13.516z"/>
</svg>
</span>
</label>
<div class="trigger">
<a class="page-link" href="/get_started">Get Started</a>
<a class="page-link" href="/features">Features</a>
<a class="page-link" href="/ecosystem">Ecosystem</a>
<a class="page-link page-current" href="/api">Docs & Tutorials</a>
<a class="page-link" href="/trusted_by">Trusted By</a>
<a class="page-link" href="https://github.com/apache/mxnet">GitHub</a>
<div class="dropdown" style="min-width:100px">
<span class="dropdown-header">Apache
<svg class="dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
</span>
<div class="dropdown-content" style="min-width:250px">
<a href="https://www.apache.org/foundation/">Apache Software Foundation</a>
<a href="https://incubator.apache.org/">Apache Incubator</a>
<a href="https://www.apache.org/licenses/">License</a>
<a href="/versions/1.9.1/api/faq/security.html">Security</a>
<a href="https://privacy.apache.org/policies/privacy-policy-public.html">Privacy</a>
<a href="https://www.apache.org/events/current-event">Events</a>
<a href="https://www.apache.org/foundation/sponsorship.html">Sponsorship</a>
<a href="https://www.apache.org/foundation/thanks.html">Thanks</a>
</div>
</div>
<div class="dropdown">
<span class="dropdown-header">master
<svg class="dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
</span>
<div class="dropdown-content">
<a class="dropdown-option-active" href="/versions/master/">master</a><br>
<a class="dropdown-option" href="/versions/1.9.1/">1.9.1</a><br>
<a class="dropdown-option" href="/versions/1.8.0/">1.8.0</a><br>
<a class="dropdown-option" href="/versions/1.7.0/">1.7.0</a><br>
<a class="dropdown-option" href="/versions/1.6.0/">1.6.0</a><br>
<a class="dropdown-option" href="/versions/1.5.0/">1.5.0</a><br>
<a class="dropdown-option" href="/versions/1.4.1/">1.4.1</a><br>
<a class="dropdown-option" href="/versions/1.3.1/">1.3.1</a><br>
<a class="dropdown-option" href="/versions/1.2.1/">1.2.1</a><br>
<a class="dropdown-option" href="/versions/1.1.0/">1.1.0</a><br>
<a class="dropdown-option" href="/versions/1.0.0/">1.0.0</a><br>
<a class="dropdown-option" href="/versions/0.12.1/">0.12.1</a><br>
<a class="dropdown-option" href="/versions/0.11.0/">0.11.0</a>
</div>
</div>
</div>
</nav>
</div>
</header>
<div class="mdl-layout mdl-js-layout mdl-layout--fixed-header mdl-layout--fixed-drawer"><header class="mdl-layout__header mdl-layout__header--waterfall ">
<div class="mdl-layout__header-row">
<nav class="mdl-navigation breadcrumb">
<a class="mdl-navigation__link" href="../../../index.html">Python Tutorials</a><i class="material-icons">navigate_next</i>
<a class="mdl-navigation__link" href="../../index.html">Packages</a><i class="material-icons">navigate_next</i>
<a class="mdl-navigation__link" href="../index.html">Gluon</a><i class="material-icons">navigate_next</i>
<a class="mdl-navigation__link" href="index.html">Losses</a><i class="material-icons">navigate_next</i>
<a class="mdl-navigation__link is-active">Loss functions</a>
</nav>
<div class="mdl-layout-spacer"></div>
<nav class="mdl-navigation">
<form class="form-inline pull-sm-right" action="../../../../search.html" method="get">
<div class="mdl-textfield mdl-js-textfield mdl-textfield--expandable mdl-textfield--floating-label mdl-textfield--align-right">
<label id="quick-search-icon" class="mdl-button mdl-js-button mdl-button--icon" for="waterfall-exp">
<i class="material-icons">search</i>
</label>
<div class="mdl-textfield__expandable-holder">
<input class="mdl-textfield__input" type="text" name="q" id="waterfall-exp" placeholder="Search" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</div>
</div>
<div class="mdl-tooltip" data-mdl-for="quick-search-icon">
Quick search
</div>
</form>
<a id="button-show-github"
href="https://github.com/apache/mxnet/edit/master/docs/python_docs/python/tutorials/packages/gluon/loss/loss.ipynb" class="mdl-button mdl-js-button mdl-button--icon">
<i class="material-icons">edit</i>
</a>
<div class="mdl-tooltip" data-mdl-for="button-show-github">
Edit on Github
</div>
</nav>
</div>
<div class="mdl-layout__header-row header-links">
<div class="mdl-layout-spacer"></div>
<nav class="mdl-navigation">
</nav>
</div>
</header><header class="mdl-layout__drawer">
<div class="globaltoc">
<span class="mdl-layout-title toc">Table Of Contents</span>
<nav class="mdl-navigation">
<ul class="current">
<li class="toctree-l1 current"><a class="reference internal" href="../../../index.html">Python Tutorials</a><ul class="current">
<li class="toctree-l2"><a class="reference internal" href="../../../getting-started/index.html">Getting Started</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../getting-started/crash-course/index.html">Crash Course</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/0-introduction.html">Introduction</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/1-nparray.html">Step 1: Manipulate data with NP on MXNet</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/2-create-nn.html">Step 2: Create a neural network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/3-autograd.html">Step 3: Automatic differentiation with autograd</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/4-components.html">Step 4: Necessary components that are not in the network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/5-datasets.html">Step 5: <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s and <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/5-datasets.html#Using-own-data-with-included-Datasets">Using own data with included <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/5-datasets.html#Using-your-own-data-with-custom-Datasets">Using your own data with custom <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/5-datasets.html#New-in-MXNet-2.0:-faster-C++-backend-dataloaders">New in MXNet 2.0: faster C++ backend dataloaders</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/6-train-nn.html">Step 6: Train a Neural Network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/7-use-gpus.html">Step 7: Load and Run a NN using GPU</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../getting-started/to-mxnet/index.html">Moving to MXNet from Other Frameworks</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/to-mxnet/pytorch.html">PyTorch vs Apache MXNet</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../getting-started/gluon_from_experiment_to_deployment.html">Gluon: from experiment to deployment</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../getting-started/gluon_migration_guide.html">Gluon2.0: Migration Guide</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../getting-started/logistic_regression_explained.html">Logistic regression explained</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/image/mnist.html">MNIST</a></li>
</ul>
</li>
<li class="toctree-l2 current"><a class="reference internal" href="../../index.html">Packages</a><ul class="current">
<li class="toctree-l3"><a class="reference internal" href="../../autograd/index.html">Automatic Differentiation</a></li>
<li class="toctree-l3 current"><a class="reference internal" href="../index.html">Gluon</a><ul class="current">
<li class="toctree-l4"><a class="reference internal" href="../blocks/index.html">Blocks</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../blocks/custom-layer.html">Custom Layers</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/hybridize.html">Hybridize</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/init.html">Initialization</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/naming.html">Parameter and Block Naming</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/nn.html">Layers and Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/parameters.html">Parameter Management</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/save_load_params.html">Saving and Loading Gluon Models</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/activations/activations.html">Activation Blocks</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../data/index.html">Data Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../data/data_augmentation.html">Image Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../data/datasets.html">Gluon <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s and <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
<li class="toctree-l5"><a class="reference internal" href="../data/datasets.html#Using-own-data-with-included-Datasets">Using own data with included <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../data/datasets.html#Using-own-data-with-custom-Datasets">Using own data with custom <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../data/datasets.html#Appendix:-Upgrading-from-Module-DataIter-to-Gluon-DataLoader">Appendix: Upgrading from Module <code class="docutils literal notranslate"><span class="pre">DataIter</span></code> to Gluon <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../image/index.html">Image Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../image/info_gan.html">Image similarity search with InfoGAN</a></li>
<li class="toctree-l5"><a class="reference internal" href="../image/mnist.html">Handwritten Digit Recognition</a></li>
</ul>
</li>
<li class="toctree-l4 current"><a class="reference internal" href="index.html">Losses</a><ul class="current">
<li class="toctree-l5"><a class="reference internal" href="custom-loss.html">Custom Loss Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="kl_divergence.html">Kullback-Leibler (KL) Divergence</a></li>
<li class="toctree-l5 current"><a class="current reference internal" href="#">Loss functions</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../text/index.html">Text Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../text/gnmt.html">Google Neural Machine Translation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../text/transformer.html">Machine Translation with Transformer</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../training/index.html">Training</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../training/fit_api_tutorial.html">MXNet Gluon Fit API</a></li>
<li class="toctree-l5"><a class="reference internal" href="../training/trainer.html">Trainer</a></li>
<li class="toctree-l5"><a class="reference internal" href="../training/learning_rates/index.html">Learning Rates</a><ul>
<li class="toctree-l6"><a class="reference internal" href="../training/learning_rates/learning_rate_finder.html">Learning Rate Finder</a></li>
<li class="toctree-l6"><a class="reference internal" href="../training/learning_rates/learning_rate_schedules.html">Learning Rate Schedules</a></li>
<li class="toctree-l6"><a class="reference internal" href="../training/learning_rates/learning_rate_schedules_advanced.html">Advanced Learning Rate Schedules</a></li>
</ul>
</li>
<li class="toctree-l5"><a class="reference internal" href="../training/normalization/index.html">Normalization Blocks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../kvstore/index.html">KVStore</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../kvstore/kvstore.html">Distributed Key-Value Store</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../legacy/index.html">Legacy</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../legacy/ndarray/index.html">NDArray</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../legacy/ndarray/01-ndarray-intro.html">An Intro: Manipulate Data the MXNet Way with NDArray</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../legacy/ndarray/02-ndarray-operations.html">NDArray Operations</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../legacy/ndarray/03-ndarray-contexts.html">NDArray Contexts</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../legacy/ndarray/gotchas_numpy_in_mxnet.html">Gotchas using NumPy in Apache MXNet</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../legacy/ndarray/sparse/index.html">Tutorials</a><ul>
<li class="toctree-l6"><a class="reference internal" href="../../legacy/ndarray/sparse/csr.html">CSRNDArray - NDArray in Compressed Sparse Row Storage Format</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../legacy/ndarray/sparse/row_sparse.html">RowSparseNDArray - NDArray for Sparse Gradient Updates</a></li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../np/index.html">What is NP on MXNet</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../np/cheat-sheet.html">The NP on MXNet cheat sheet</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../np/np-vs-numpy.html">Differences between NP on MXNet and NumPy</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../onnx/index.html">ONNX</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../onnx/fine_tuning_gluon.html">Fine-tuning an ONNX model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../onnx/inference_on_onnx_model.html">Running inference on MXNet/Gluon from an ONNX model</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/deploy/export/onnx.html">Export ONNX Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../optimizer/index.html">Optimizers</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../viz/index.html">Visualization</a><ul>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/visualize_graph">Visualize networks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../performance/index.html">Performance</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../performance/compression/index.html">Compression</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../performance/compression/int8.html">Deploy with int-8</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/float16">Float16</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/gradient_compression">Gradient Compression</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/int8_inference.html">GluonCV with Quantized Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../performance/backend/index.html">Accelerated Backend Tools</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../performance/backend/dnnl/index.html">oneDNN</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../performance/backend/dnnl/dnnl_readme.html">Install MXNet with oneDNN</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../performance/backend/dnnl/dnnl_quantization.html">oneDNN Quantization</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../performance/backend/dnnl/dnnl_quantization_inc.html">Improving accuracy with Intel® Neural Compressor</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../performance/backend/tvm.html">Use TVM</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../performance/backend/profiler.html">Profiling MXNet Models</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../performance/backend/amp.html">Using AMP: Automatic Mixed Precision</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../deploy/index.html">Deployment</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../deploy/export/index.html">Export</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../deploy/export/onnx.html">Exporting to ONNX format</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/export_network.html">Export Gluon CV Models</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/blocks/save_load_params.html">Save / Load Parameters</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../deploy/inference/index.html">Inference</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../deploy/inference/cpp.html">Deploy into C++</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../deploy/inference/image_classification_jetson.html">Image Classication using pretrained ResNet-50 model on Jetson module</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../deploy/run-on-aws/index.html">Run on AWS</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../deploy/run-on-aws/use_ec2.html">Run on an EC2 Instance</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../deploy/run-on-aws/use_sagemaker.html">Run on Amazon SageMaker</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../deploy/run-on-aws/cloud.html">MXNet on the Cloud</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../extend/index.html">Extend</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../extend/customop.html">Custom Numpy Operators</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/new_op">New Operator Creation</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/add_op_in_backend">New Operator in MXNet Backend</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/using_rtc">Using RTC for CUDA kernels</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../../../api/index.html">Python API</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/np/index.html">mxnet.np</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/np/arrays.html">Array objects</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/arrays.ndarray.html">The N-dimensional array (<code class="xref py py-class docutils literal notranslate"><span class="pre">ndarray</span></code>)</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/arrays.indexing.html">Indexing</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/np/routines.html">Routines</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.array-creation.html">Array creation routines</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.eye.html">mxnet.np.eye</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.empty.html">mxnet.np.empty</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.full.html">mxnet.np.full</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.identity.html">mxnet.np.identity</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ones.html">mxnet.np.ones</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ones_like.html">mxnet.np.ones_like</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.zeros.html">mxnet.np.zeros</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.zeros_like.html">mxnet.np.zeros_like</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.array.html">mxnet.np.array</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.copy.html">mxnet.np.copy</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arange.html">mxnet.np.arange</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linspace.html">mxnet.np.linspace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.logspace.html">mxnet.np.logspace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.meshgrid.html">mxnet.np.meshgrid</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.tril.html">mxnet.np.tril</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.array-manipulation.html">Array manipulation routines</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.reshape.html">mxnet.np.reshape</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ravel.html">mxnet.np.ravel</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ndarray.flatten.html">mxnet.np.ndarray.flatten</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.swapaxes.html">mxnet.np.swapaxes</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ndarray.T.html">mxnet.np.ndarray.T</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.transpose.html">mxnet.np.transpose</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.moveaxis.html">mxnet.np.moveaxis</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.rollaxis.html">mxnet.np.rollaxis</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.expand_dims.html">mxnet.np.expand_dims</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.squeeze.html">mxnet.np.squeeze</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.broadcast_to.html">mxnet.np.broadcast_to</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.broadcast_arrays.html">mxnet.np.broadcast_arrays</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.atleast_1d.html">mxnet.np.atleast_1d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.atleast_2d.html">mxnet.np.atleast_2d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.atleast_3d.html">mxnet.np.atleast_3d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.concatenate.html">mxnet.np.concatenate</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.stack.html">mxnet.np.stack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.dstack.html">mxnet.np.dstack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.vstack.html">mxnet.np.vstack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.column_stack.html">mxnet.np.column_stack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.hstack.html">mxnet.np.hstack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.split.html">mxnet.np.split</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.hsplit.html">mxnet.np.hsplit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.vsplit.html">mxnet.np.vsplit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.array_split.html">mxnet.np.array_split</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.dsplit.html">mxnet.np.dsplit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.tile.html">mxnet.np.tile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.repeat.html">mxnet.np.repeat</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.unique.html">mxnet.np.unique</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.delete.html">mxnet.np.delete</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.insert.html">mxnet.np.insert</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.append.html">mxnet.np.append</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.resize.html">mxnet.np.resize</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.trim_zeros.html">mxnet.np.trim_zeros</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.reshape.html">mxnet.np.reshape</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.flip.html">mxnet.np.flip</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.roll.html">mxnet.np.roll</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.rot90.html">mxnet.np.rot90</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.fliplr.html">mxnet.np.fliplr</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.flipud.html">mxnet.np.flipud</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.io.html">Input and output</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.genfromtxt.html">mxnet.np.genfromtxt</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ndarray.tolist.html">mxnet.np.ndarray.tolist</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.set_printoptions.html">mxnet.np.set_printoptions</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.linalg.html">Linear algebra (<code class="xref py py-mod docutils literal notranslate"><span class="pre">numpy.linalg</span></code>)</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.dot.html">mxnet.np.dot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.vdot.html">mxnet.np.vdot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.inner.html">mxnet.np.inner</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.outer.html">mxnet.np.outer</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.tensordot.html">mxnet.np.tensordot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.einsum.html">mxnet.np.einsum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.multi_dot.html">mxnet.np.linalg.multi_dot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.matmul.html">mxnet.np.matmul</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.matrix_power.html">mxnet.np.linalg.matrix_power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.kron.html">mxnet.np.kron</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.svd.html">mxnet.np.linalg.svd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.cholesky.html">mxnet.np.linalg.cholesky</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.qr.html">mxnet.np.linalg.qr</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.eig.html">mxnet.np.linalg.eig</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.eigh.html">mxnet.np.linalg.eigh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.eigvals.html">mxnet.np.linalg.eigvals</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.eigvalsh.html">mxnet.np.linalg.eigvalsh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.norm.html">mxnet.np.linalg.norm</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.trace.html">mxnet.np.trace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.cond.html">mxnet.np.linalg.cond</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.det.html">mxnet.np.linalg.det</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.matrix_rank.html">mxnet.np.linalg.matrix_rank</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.slogdet.html">mxnet.np.linalg.slogdet</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.solve.html">mxnet.np.linalg.solve</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.tensorsolve.html">mxnet.np.linalg.tensorsolve</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.lstsq.html">mxnet.np.linalg.lstsq</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.inv.html">mxnet.np.linalg.inv</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.pinv.html">mxnet.np.linalg.pinv</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.tensorinv.html">mxnet.np.linalg.tensorinv</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.math.html">Mathematical functions</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.sin.html">mxnet.np.sin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cos.html">mxnet.np.cos</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.tan.html">mxnet.np.tan</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arcsin.html">mxnet.np.arcsin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arccos.html">mxnet.np.arccos</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arctan.html">mxnet.np.arctan</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.degrees.html">mxnet.np.degrees</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.radians.html">mxnet.np.radians</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.hypot.html">mxnet.np.hypot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arctan2.html">mxnet.np.arctan2</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.deg2rad.html">mxnet.np.deg2rad</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.rad2deg.html">mxnet.np.rad2deg</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.unwrap.html">mxnet.np.unwrap</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.sinh.html">mxnet.np.sinh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cosh.html">mxnet.np.cosh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.tanh.html">mxnet.np.tanh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arcsinh.html">mxnet.np.arcsinh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arccosh.html">mxnet.np.arccosh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arctanh.html">mxnet.np.arctanh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.rint.html">mxnet.np.rint</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.fix.html">mxnet.np.fix</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.floor.html">mxnet.np.floor</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ceil.html">mxnet.np.ceil</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.trunc.html">mxnet.np.trunc</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.around.html">mxnet.np.around</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.round_.html">mxnet.np.round_</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.sum.html">mxnet.np.sum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.prod.html">mxnet.np.prod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cumsum.html">mxnet.np.cumsum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanprod.html">mxnet.np.nanprod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nansum.html">mxnet.np.nansum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cumprod.html">mxnet.np.cumprod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nancumprod.html">mxnet.np.nancumprod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nancumsum.html">mxnet.np.nancumsum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.diff.html">mxnet.np.diff</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ediff1d.html">mxnet.np.ediff1d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cross.html">mxnet.np.cross</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.trapz.html">mxnet.np.trapz</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.exp.html">mxnet.np.exp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.expm1.html">mxnet.np.expm1</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.log.html">mxnet.np.log</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.log10.html">mxnet.np.log10</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.log2.html">mxnet.np.log2</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.log1p.html">mxnet.np.log1p</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.logaddexp.html">mxnet.np.logaddexp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.i0.html">mxnet.np.i0</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ldexp.html">mxnet.np.ldexp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.signbit.html">mxnet.np.signbit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.copysign.html">mxnet.np.copysign</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.frexp.html">mxnet.np.frexp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.spacing.html">mxnet.np.spacing</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.lcm.html">mxnet.np.lcm</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.gcd.html">mxnet.np.gcd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.add.html">mxnet.np.add</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.reciprocal.html">mxnet.np.reciprocal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.negative.html">mxnet.np.negative</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.divide.html">mxnet.np.divide</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.power.html">mxnet.np.power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.subtract.html">mxnet.np.subtract</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.mod.html">mxnet.np.mod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.multiply.html">mxnet.np.multiply</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.true_divide.html">mxnet.np.true_divide</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.remainder.html">mxnet.np.remainder</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.positive.html">mxnet.np.positive</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.float_power.html">mxnet.np.float_power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.fmod.html">mxnet.np.fmod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.modf.html">mxnet.np.modf</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.divmod.html">mxnet.np.divmod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.floor_divide.html">mxnet.np.floor_divide</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.clip.html">mxnet.np.clip</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.sqrt.html">mxnet.np.sqrt</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cbrt.html">mxnet.np.cbrt</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.square.html">mxnet.np.square</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.absolute.html">mxnet.np.absolute</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.sign.html">mxnet.np.sign</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.maximum.html">mxnet.np.maximum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.minimum.html">mxnet.np.minimum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.fabs.html">mxnet.np.fabs</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.heaviside.html">mxnet.np.heaviside</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.fmax.html">mxnet.np.fmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.fmin.html">mxnet.np.fmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nan_to_num.html">mxnet.np.nan_to_num</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.interp.html">mxnet.np.interp</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/random/index.html">np.random</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.choice.html">mxnet.np.random.choice</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.shuffle.html">mxnet.np.random.shuffle</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.normal.html">mxnet.np.random.normal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.uniform.html">mxnet.np.random.uniform</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.rand.html">mxnet.np.random.rand</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.randint.html">mxnet.np.random.randint</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.beta.html">mxnet.np.random.beta</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.chisquare.html">mxnet.np.random.chisquare</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.exponential.html">mxnet.np.random.exponential</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.f.html">mxnet.np.random.f</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.gamma.html">mxnet.np.random.gamma</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.gumbel.html">mxnet.np.random.gumbel</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.laplace.html">mxnet.np.random.laplace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.logistic.html">mxnet.np.random.logistic</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.lognormal.html">mxnet.np.random.lognormal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.multinomial.html">mxnet.np.random.multinomial</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.multivariate_normal.html">mxnet.np.random.multivariate_normal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.pareto.html">mxnet.np.random.pareto</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.power.html">mxnet.np.random.power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.rayleigh.html">mxnet.np.random.rayleigh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.weibull.html">mxnet.np.random.weibull</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.sort.html">Sorting, searching, and counting</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ndarray.sort.html">mxnet.np.ndarray.sort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.sort.html">mxnet.np.sort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.lexsort.html">mxnet.np.lexsort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.argsort.html">mxnet.np.argsort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.msort.html">mxnet.np.msort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.partition.html">mxnet.np.partition</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.argpartition.html">mxnet.np.argpartition</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.argmax.html">mxnet.np.argmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.argmin.html">mxnet.np.argmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanargmax.html">mxnet.np.nanargmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanargmin.html">mxnet.np.nanargmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.argwhere.html">mxnet.np.argwhere</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nonzero.html">mxnet.np.nonzero</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.flatnonzero.html">mxnet.np.flatnonzero</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.where.html">mxnet.np.where</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.searchsorted.html">mxnet.np.searchsorted</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.extract.html">mxnet.np.extract</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.count_nonzero.html">mxnet.np.count_nonzero</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.statistics.html">Statistics</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.min.html">mxnet.np.min</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.max.html">mxnet.np.max</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.amin.html">mxnet.np.amin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.amax.html">mxnet.np.amax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanmin.html">mxnet.np.nanmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanmax.html">mxnet.np.nanmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ptp.html">mxnet.np.ptp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.percentile.html">mxnet.np.percentile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanpercentile.html">mxnet.np.nanpercentile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.quantile.html">mxnet.np.quantile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanquantile.html">mxnet.np.nanquantile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.mean.html">mxnet.np.mean</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.std.html">mxnet.np.std</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.var.html">mxnet.np.var</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.median.html">mxnet.np.median</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.average.html">mxnet.np.average</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanmedian.html">mxnet.np.nanmedian</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanstd.html">mxnet.np.nanstd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanvar.html">mxnet.np.nanvar</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.corrcoef.html">mxnet.np.corrcoef</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.correlate.html">mxnet.np.correlate</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cov.html">mxnet.np.cov</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.histogram.html">mxnet.np.histogram</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.histogram2d.html">mxnet.np.histogram2d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.histogramdd.html">mxnet.np.histogramdd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.bincount.html">mxnet.np.bincount</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.histogram_bin_edges.html">mxnet.np.histogram_bin_edges</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.digitize.html">mxnet.np.digitize</a></li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/npx/index.html">NPX: NumPy Neural Network Extension</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.set_np.html">mxnet.npx.set_np</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.reset_np.html">mxnet.npx.reset_np</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.cpu.html">mxnet.npx.cpu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.cpu_pinned.html">mxnet.npx.cpu_pinned</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.gpu.html">mxnet.npx.gpu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.gpu_memory_info.html">mxnet.npx.gpu_memory_info</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.current_device.html">mxnet.npx.current_device</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.num_gpus.html">mxnet.npx.num_gpus</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.activation.html">mxnet.npx.activation</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.batch_norm.html">mxnet.npx.batch_norm</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.convolution.html">mxnet.npx.convolution</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.dropout.html">mxnet.npx.dropout</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.embedding.html">mxnet.npx.embedding</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.fully_connected.html">mxnet.npx.fully_connected</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.layer_norm.html">mxnet.npx.layer_norm</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.pooling.html">mxnet.npx.pooling</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.rnn.html">mxnet.npx.rnn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.leaky_relu.html">mxnet.npx.leaky_relu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.multibox_detection.html">mxnet.npx.multibox_detection</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.multibox_prior.html">mxnet.npx.multibox_prior</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.multibox_target.html">mxnet.npx.multibox_target</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.roi_pooling.html">mxnet.npx.roi_pooling</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.sigmoid.html">mxnet.npx.sigmoid</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.relu.html">mxnet.npx.relu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.smooth_l1.html">mxnet.npx.smooth_l1</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.softmax.html">mxnet.npx.softmax</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.log_softmax.html">mxnet.npx.log_softmax</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.topk.html">mxnet.npx.topk</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.waitall.html">mxnet.npx.waitall</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.load.html">mxnet.npx.load</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.save.html">mxnet.npx.save</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.one_hot.html">mxnet.npx.one_hot</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.pick.html">mxnet.npx.pick</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.reshape_like.html">mxnet.npx.reshape_like</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.batch_flatten.html">mxnet.npx.batch_flatten</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.batch_dot.html">mxnet.npx.batch_dot</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.gamma.html">mxnet.npx.gamma</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.sequence_mask.html">mxnet.npx.sequence_mask</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/gluon/index.html">mxnet.gluon</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/block.html">gluon.Block</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/hybrid_block.html">gluon.HybridBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/symbol_block.html">gluon.SymbolBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/constant.html">gluon.Constant</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/parameter.html">gluon.Parameter</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/trainer.html">gluon.Trainer</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/contrib/index.html">gluon.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/data/index.html">gluon.data</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/gluon/data/vision/index.html">data.vision</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/gluon/data/vision/datasets/index.html">vision.datasets</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/gluon/data/vision/transforms/index.html">vision.transforms</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/loss/index.html">gluon.loss</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/metric/index.html">gluon.metric</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/model_zoo/index.html">gluon.model_zoo.vision</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/nn/index.html">gluon.nn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/rnn/index.html">gluon.rnn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/utils/index.html">gluon.utils</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/autograd/index.html">mxnet.autograd</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/initializer/index.html">mxnet.initializer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/optimizer/index.html">mxnet.optimizer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/lr_scheduler/index.html">mxnet.lr_scheduler</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/kvstore/index.html">KVStore: Communication for Distributed Training</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/kvstore/index.html#horovod">Horovod</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/kvstore/generated/mxnet.kvstore.Horovod.html">mxnet.kvstore.Horovod</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/kvstore/index.html#byteps">BytePS</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/kvstore/generated/mxnet.kvstore.BytePS.html">mxnet.kvstore.BytePS</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/kvstore/index.html#kvstore-interface">KVStore Interface</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/kvstore/generated/mxnet.kvstore.KVStore.html">mxnet.kvstore.KVStore</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/kvstore/generated/mxnet.kvstore.KVStoreBase.html">mxnet.kvstore.KVStoreBase</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/kvstore/generated/mxnet.kvstore.KVStoreServer.html">mxnet.kvstore.KVStoreServer</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/contrib/index.html">mxnet.contrib</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/io/index.html">contrib.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/ndarray/index.html">contrib.ndarray</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/onnx/index.html">contrib.onnx</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/quantization/index.html">contrib.quantization</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/symbol/index.html">contrib.symbol</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/tensorboard/index.html">contrib.tensorboard</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/tensorrt/index.html">contrib.tensorrt</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/text/index.html">contrib.text</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/legacy/index.html">Legacy</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/callback/index.html">mxnet.callback</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/image/index.html">mxnet.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/io/index.html">mxnet.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/ndarray/index.html">mxnet.ndarray</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/ndarray.html">ndarray</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/contrib/index.html">ndarray.contrib</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/image/index.html">ndarray.image</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/linalg/index.html">ndarray.linalg</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/op/index.html">ndarray.op</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/random/index.html">ndarray.random</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/register/index.html">ndarray.register</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/sparse/index.html">ndarray.sparse</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/utils/index.html">ndarray.utils</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/recordio/index.html">mxnet.recordio</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/symbol/index.html">mxnet.symbol</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/symbol.html">symbol</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/contrib/index.html">symbol.contrib</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/image/index.html">symbol.image</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/linalg/index.html">symbol.linalg</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/op/index.html">symbol.op</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/random/index.html">symbol.random</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/register/index.html">symbol.register</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/sparse/index.html">symbol.sparse</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/visualization/index.html">mxnet.visualization</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/device/index.html">mxnet.device</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/engine/index.html">mxnet.engine</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/executor/index.html">mxnet.executor</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/kvstore_server/index.html">mxnet.kvstore_server</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/profiler/index.html">mxnet.profiler</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/rtc/index.html">mxnet.rtc</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/runtime/index.html">mxnet.runtime</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/runtime/generated/mxnet.runtime.Feature.html">mxnet.runtime.Feature</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/runtime/generated/mxnet.runtime.Features.html">mxnet.runtime.Features</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/runtime/generated/mxnet.runtime.feature_list.html">mxnet.runtime.feature_list</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/test_utils/index.html">mxnet.test_utils</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/util/index.html">mxnet.util</a></li>
</ul>
</li>
</ul>
</nav>
</div>
</header>
<main class="mdl-layout__content" tabIndex="0">
<header class="mdl-layout__drawer">
<div class="globaltoc">
<span class="mdl-layout-title toc">Table Of Contents</span>
<nav class="mdl-navigation">
<ul class="current">
<li class="toctree-l1 current"><a class="reference internal" href="../../../index.html">Python Tutorials</a><ul class="current">
<li class="toctree-l2"><a class="reference internal" href="../../../getting-started/index.html">Getting Started</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../getting-started/crash-course/index.html">Crash Course</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/0-introduction.html">Introduction</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/1-nparray.html">Step 1: Manipulate data with NP on MXNet</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/2-create-nn.html">Step 2: Create a neural network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/3-autograd.html">Step 3: Automatic differentiation with autograd</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/4-components.html">Step 4: Necessary components that are not in the network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/5-datasets.html">Step 5: <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s and <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/5-datasets.html#Using-own-data-with-included-Datasets">Using own data with included <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/5-datasets.html#Using-your-own-data-with-custom-Datasets">Using your own data with custom <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/5-datasets.html#New-in-MXNet-2.0:-faster-C++-backend-dataloaders">New in MXNet 2.0: faster C++ backend dataloaders</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/6-train-nn.html">Step 6: Train a Neural Network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/crash-course/7-use-gpus.html">Step 7: Load and Run a NN using GPU</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../getting-started/to-mxnet/index.html">Moving to MXNet from Other Frameworks</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../getting-started/to-mxnet/pytorch.html">PyTorch vs Apache MXNet</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../getting-started/gluon_from_experiment_to_deployment.html">Gluon: from experiment to deployment</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../getting-started/gluon_migration_guide.html">Gluon2.0: Migration Guide</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../getting-started/logistic_regression_explained.html">Logistic regression explained</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/image/mnist.html">MNIST</a></li>
</ul>
</li>
<li class="toctree-l2 current"><a class="reference internal" href="../../index.html">Packages</a><ul class="current">
<li class="toctree-l3"><a class="reference internal" href="../../autograd/index.html">Automatic Differentiation</a></li>
<li class="toctree-l3 current"><a class="reference internal" href="../index.html">Gluon</a><ul class="current">
<li class="toctree-l4"><a class="reference internal" href="../blocks/index.html">Blocks</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../blocks/custom-layer.html">Custom Layers</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/hybridize.html">Hybridize</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/init.html">Initialization</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/naming.html">Parameter and Block Naming</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/nn.html">Layers and Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/parameters.html">Parameter Management</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/save_load_params.html">Saving and Loading Gluon Models</a></li>
<li class="toctree-l5"><a class="reference internal" href="../blocks/activations/activations.html">Activation Blocks</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../data/index.html">Data Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../data/data_augmentation.html">Image Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../data/datasets.html">Gluon <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s and <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
<li class="toctree-l5"><a class="reference internal" href="../data/datasets.html#Using-own-data-with-included-Datasets">Using own data with included <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../data/datasets.html#Using-own-data-with-custom-Datasets">Using own data with custom <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../data/datasets.html#Appendix:-Upgrading-from-Module-DataIter-to-Gluon-DataLoader">Appendix: Upgrading from Module <code class="docutils literal notranslate"><span class="pre">DataIter</span></code> to Gluon <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../image/index.html">Image Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../image/info_gan.html">Image similarity search with InfoGAN</a></li>
<li class="toctree-l5"><a class="reference internal" href="../image/mnist.html">Handwritten Digit Recognition</a></li>
</ul>
</li>
<li class="toctree-l4 current"><a class="reference internal" href="index.html">Losses</a><ul class="current">
<li class="toctree-l5"><a class="reference internal" href="custom-loss.html">Custom Loss Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="kl_divergence.html">Kullback-Leibler (KL) Divergence</a></li>
<li class="toctree-l5 current"><a class="current reference internal" href="#">Loss functions</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../text/index.html">Text Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../text/gnmt.html">Google Neural Machine Translation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../text/transformer.html">Machine Translation with Transformer</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../training/index.html">Training</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../training/fit_api_tutorial.html">MXNet Gluon Fit API</a></li>
<li class="toctree-l5"><a class="reference internal" href="../training/trainer.html">Trainer</a></li>
<li class="toctree-l5"><a class="reference internal" href="../training/learning_rates/index.html">Learning Rates</a><ul>
<li class="toctree-l6"><a class="reference internal" href="../training/learning_rates/learning_rate_finder.html">Learning Rate Finder</a></li>
<li class="toctree-l6"><a class="reference internal" href="../training/learning_rates/learning_rate_schedules.html">Learning Rate Schedules</a></li>
<li class="toctree-l6"><a class="reference internal" href="../training/learning_rates/learning_rate_schedules_advanced.html">Advanced Learning Rate Schedules</a></li>
</ul>
</li>
<li class="toctree-l5"><a class="reference internal" href="../training/normalization/index.html">Normalization Blocks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../kvstore/index.html">KVStore</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../kvstore/kvstore.html">Distributed Key-Value Store</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../legacy/index.html">Legacy</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../legacy/ndarray/index.html">NDArray</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../legacy/ndarray/01-ndarray-intro.html">An Intro: Manipulate Data the MXNet Way with NDArray</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../legacy/ndarray/02-ndarray-operations.html">NDArray Operations</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../legacy/ndarray/03-ndarray-contexts.html">NDArray Contexts</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../legacy/ndarray/gotchas_numpy_in_mxnet.html">Gotchas using NumPy in Apache MXNet</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../legacy/ndarray/sparse/index.html">Tutorials</a><ul>
<li class="toctree-l6"><a class="reference internal" href="../../legacy/ndarray/sparse/csr.html">CSRNDArray - NDArray in Compressed Sparse Row Storage Format</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../legacy/ndarray/sparse/row_sparse.html">RowSparseNDArray - NDArray for Sparse Gradient Updates</a></li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../np/index.html">What is NP on MXNet</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../np/cheat-sheet.html">The NP on MXNet cheat sheet</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../np/np-vs-numpy.html">Differences between NP on MXNet and NumPy</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../onnx/index.html">ONNX</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../onnx/fine_tuning_gluon.html">Fine-tuning an ONNX model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../onnx/inference_on_onnx_model.html">Running inference on MXNet/Gluon from an ONNX model</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/deploy/export/onnx.html">Export ONNX Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../optimizer/index.html">Optimizers</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../viz/index.html">Visualization</a><ul>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/visualize_graph">Visualize networks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../performance/index.html">Performance</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../performance/compression/index.html">Compression</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../performance/compression/int8.html">Deploy with int-8</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/float16">Float16</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/gradient_compression">Gradient Compression</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/int8_inference.html">GluonCV with Quantized Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../performance/backend/index.html">Accelerated Backend Tools</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../performance/backend/dnnl/index.html">oneDNN</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../performance/backend/dnnl/dnnl_readme.html">Install MXNet with oneDNN</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../performance/backend/dnnl/dnnl_quantization.html">oneDNN Quantization</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../performance/backend/dnnl/dnnl_quantization_inc.html">Improving accuracy with Intel® Neural Compressor</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../performance/backend/tvm.html">Use TVM</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../performance/backend/profiler.html">Profiling MXNet Models</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../performance/backend/amp.html">Using AMP: Automatic Mixed Precision</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../deploy/index.html">Deployment</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../deploy/export/index.html">Export</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../deploy/export/onnx.html">Exporting to ONNX format</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/export_network.html">Export Gluon CV Models</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/blocks/save_load_params.html">Save / Load Parameters</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../deploy/inference/index.html">Inference</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../deploy/inference/cpp.html">Deploy into C++</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../deploy/inference/image_classification_jetson.html">Image Classication using pretrained ResNet-50 model on Jetson module</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../deploy/run-on-aws/index.html">Run on AWS</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../deploy/run-on-aws/use_ec2.html">Run on an EC2 Instance</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../deploy/run-on-aws/use_sagemaker.html">Run on Amazon SageMaker</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../deploy/run-on-aws/cloud.html">MXNet on the Cloud</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../extend/index.html">Extend</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../extend/customop.html">Custom Numpy Operators</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/new_op">New Operator Creation</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/add_op_in_backend">New Operator in MXNet Backend</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/using_rtc">Using RTC for CUDA kernels</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../../../api/index.html">Python API</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/np/index.html">mxnet.np</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/np/arrays.html">Array objects</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/arrays.ndarray.html">The N-dimensional array (<code class="xref py py-class docutils literal notranslate"><span class="pre">ndarray</span></code>)</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/arrays.indexing.html">Indexing</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/np/routines.html">Routines</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.array-creation.html">Array creation routines</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.eye.html">mxnet.np.eye</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.empty.html">mxnet.np.empty</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.full.html">mxnet.np.full</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.identity.html">mxnet.np.identity</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ones.html">mxnet.np.ones</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ones_like.html">mxnet.np.ones_like</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.zeros.html">mxnet.np.zeros</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.zeros_like.html">mxnet.np.zeros_like</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.array.html">mxnet.np.array</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.copy.html">mxnet.np.copy</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arange.html">mxnet.np.arange</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linspace.html">mxnet.np.linspace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.logspace.html">mxnet.np.logspace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.meshgrid.html">mxnet.np.meshgrid</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.tril.html">mxnet.np.tril</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.array-manipulation.html">Array manipulation routines</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.reshape.html">mxnet.np.reshape</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ravel.html">mxnet.np.ravel</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ndarray.flatten.html">mxnet.np.ndarray.flatten</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.swapaxes.html">mxnet.np.swapaxes</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ndarray.T.html">mxnet.np.ndarray.T</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.transpose.html">mxnet.np.transpose</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.moveaxis.html">mxnet.np.moveaxis</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.rollaxis.html">mxnet.np.rollaxis</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.expand_dims.html">mxnet.np.expand_dims</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.squeeze.html">mxnet.np.squeeze</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.broadcast_to.html">mxnet.np.broadcast_to</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.broadcast_arrays.html">mxnet.np.broadcast_arrays</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.atleast_1d.html">mxnet.np.atleast_1d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.atleast_2d.html">mxnet.np.atleast_2d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.atleast_3d.html">mxnet.np.atleast_3d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.concatenate.html">mxnet.np.concatenate</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.stack.html">mxnet.np.stack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.dstack.html">mxnet.np.dstack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.vstack.html">mxnet.np.vstack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.column_stack.html">mxnet.np.column_stack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.hstack.html">mxnet.np.hstack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.split.html">mxnet.np.split</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.hsplit.html">mxnet.np.hsplit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.vsplit.html">mxnet.np.vsplit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.array_split.html">mxnet.np.array_split</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.dsplit.html">mxnet.np.dsplit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.tile.html">mxnet.np.tile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.repeat.html">mxnet.np.repeat</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.unique.html">mxnet.np.unique</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.delete.html">mxnet.np.delete</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.insert.html">mxnet.np.insert</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.append.html">mxnet.np.append</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.resize.html">mxnet.np.resize</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.trim_zeros.html">mxnet.np.trim_zeros</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.reshape.html">mxnet.np.reshape</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.flip.html">mxnet.np.flip</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.roll.html">mxnet.np.roll</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.rot90.html">mxnet.np.rot90</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.fliplr.html">mxnet.np.fliplr</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.flipud.html">mxnet.np.flipud</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.io.html">Input and output</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.genfromtxt.html">mxnet.np.genfromtxt</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ndarray.tolist.html">mxnet.np.ndarray.tolist</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.set_printoptions.html">mxnet.np.set_printoptions</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.linalg.html">Linear algebra (<code class="xref py py-mod docutils literal notranslate"><span class="pre">numpy.linalg</span></code>)</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.dot.html">mxnet.np.dot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.vdot.html">mxnet.np.vdot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.inner.html">mxnet.np.inner</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.outer.html">mxnet.np.outer</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.tensordot.html">mxnet.np.tensordot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.einsum.html">mxnet.np.einsum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.multi_dot.html">mxnet.np.linalg.multi_dot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.matmul.html">mxnet.np.matmul</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.matrix_power.html">mxnet.np.linalg.matrix_power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.kron.html">mxnet.np.kron</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.svd.html">mxnet.np.linalg.svd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.cholesky.html">mxnet.np.linalg.cholesky</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.qr.html">mxnet.np.linalg.qr</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.eig.html">mxnet.np.linalg.eig</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.eigh.html">mxnet.np.linalg.eigh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.eigvals.html">mxnet.np.linalg.eigvals</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.eigvalsh.html">mxnet.np.linalg.eigvalsh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.norm.html">mxnet.np.linalg.norm</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.trace.html">mxnet.np.trace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.cond.html">mxnet.np.linalg.cond</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.det.html">mxnet.np.linalg.det</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.matrix_rank.html">mxnet.np.linalg.matrix_rank</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.slogdet.html">mxnet.np.linalg.slogdet</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.solve.html">mxnet.np.linalg.solve</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.tensorsolve.html">mxnet.np.linalg.tensorsolve</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.lstsq.html">mxnet.np.linalg.lstsq</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.inv.html">mxnet.np.linalg.inv</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.pinv.html">mxnet.np.linalg.pinv</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.linalg.tensorinv.html">mxnet.np.linalg.tensorinv</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.math.html">Mathematical functions</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.sin.html">mxnet.np.sin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cos.html">mxnet.np.cos</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.tan.html">mxnet.np.tan</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arcsin.html">mxnet.np.arcsin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arccos.html">mxnet.np.arccos</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arctan.html">mxnet.np.arctan</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.degrees.html">mxnet.np.degrees</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.radians.html">mxnet.np.radians</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.hypot.html">mxnet.np.hypot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arctan2.html">mxnet.np.arctan2</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.deg2rad.html">mxnet.np.deg2rad</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.rad2deg.html">mxnet.np.rad2deg</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.unwrap.html">mxnet.np.unwrap</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.sinh.html">mxnet.np.sinh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cosh.html">mxnet.np.cosh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.tanh.html">mxnet.np.tanh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arcsinh.html">mxnet.np.arcsinh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arccosh.html">mxnet.np.arccosh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.arctanh.html">mxnet.np.arctanh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.rint.html">mxnet.np.rint</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.fix.html">mxnet.np.fix</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.floor.html">mxnet.np.floor</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ceil.html">mxnet.np.ceil</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.trunc.html">mxnet.np.trunc</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.around.html">mxnet.np.around</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.round_.html">mxnet.np.round_</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.sum.html">mxnet.np.sum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.prod.html">mxnet.np.prod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cumsum.html">mxnet.np.cumsum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanprod.html">mxnet.np.nanprod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nansum.html">mxnet.np.nansum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cumprod.html">mxnet.np.cumprod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nancumprod.html">mxnet.np.nancumprod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nancumsum.html">mxnet.np.nancumsum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.diff.html">mxnet.np.diff</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ediff1d.html">mxnet.np.ediff1d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cross.html">mxnet.np.cross</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.trapz.html">mxnet.np.trapz</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.exp.html">mxnet.np.exp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.expm1.html">mxnet.np.expm1</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.log.html">mxnet.np.log</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.log10.html">mxnet.np.log10</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.log2.html">mxnet.np.log2</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.log1p.html">mxnet.np.log1p</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.logaddexp.html">mxnet.np.logaddexp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.i0.html">mxnet.np.i0</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ldexp.html">mxnet.np.ldexp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.signbit.html">mxnet.np.signbit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.copysign.html">mxnet.np.copysign</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.frexp.html">mxnet.np.frexp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.spacing.html">mxnet.np.spacing</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.lcm.html">mxnet.np.lcm</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.gcd.html">mxnet.np.gcd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.add.html">mxnet.np.add</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.reciprocal.html">mxnet.np.reciprocal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.negative.html">mxnet.np.negative</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.divide.html">mxnet.np.divide</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.power.html">mxnet.np.power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.subtract.html">mxnet.np.subtract</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.mod.html">mxnet.np.mod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.multiply.html">mxnet.np.multiply</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.true_divide.html">mxnet.np.true_divide</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.remainder.html">mxnet.np.remainder</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.positive.html">mxnet.np.positive</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.float_power.html">mxnet.np.float_power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.fmod.html">mxnet.np.fmod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.modf.html">mxnet.np.modf</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.divmod.html">mxnet.np.divmod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.floor_divide.html">mxnet.np.floor_divide</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.clip.html">mxnet.np.clip</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.sqrt.html">mxnet.np.sqrt</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cbrt.html">mxnet.np.cbrt</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.square.html">mxnet.np.square</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.absolute.html">mxnet.np.absolute</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.sign.html">mxnet.np.sign</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.maximum.html">mxnet.np.maximum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.minimum.html">mxnet.np.minimum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.fabs.html">mxnet.np.fabs</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.heaviside.html">mxnet.np.heaviside</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.fmax.html">mxnet.np.fmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.fmin.html">mxnet.np.fmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nan_to_num.html">mxnet.np.nan_to_num</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.interp.html">mxnet.np.interp</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/random/index.html">np.random</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.choice.html">mxnet.np.random.choice</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.shuffle.html">mxnet.np.random.shuffle</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.normal.html">mxnet.np.random.normal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.uniform.html">mxnet.np.random.uniform</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.rand.html">mxnet.np.random.rand</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.randint.html">mxnet.np.random.randint</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.beta.html">mxnet.np.random.beta</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.chisquare.html">mxnet.np.random.chisquare</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.exponential.html">mxnet.np.random.exponential</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.f.html">mxnet.np.random.f</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.gamma.html">mxnet.np.random.gamma</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.gumbel.html">mxnet.np.random.gumbel</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.laplace.html">mxnet.np.random.laplace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.logistic.html">mxnet.np.random.logistic</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.lognormal.html">mxnet.np.random.lognormal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.multinomial.html">mxnet.np.random.multinomial</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.multivariate_normal.html">mxnet.np.random.multivariate_normal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.pareto.html">mxnet.np.random.pareto</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.power.html">mxnet.np.random.power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.rayleigh.html">mxnet.np.random.rayleigh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/random/generated/mxnet.np.random.weibull.html">mxnet.np.random.weibull</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.sort.html">Sorting, searching, and counting</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ndarray.sort.html">mxnet.np.ndarray.sort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.sort.html">mxnet.np.sort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.lexsort.html">mxnet.np.lexsort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.argsort.html">mxnet.np.argsort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.msort.html">mxnet.np.msort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.partition.html">mxnet.np.partition</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.argpartition.html">mxnet.np.argpartition</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.argmax.html">mxnet.np.argmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.argmin.html">mxnet.np.argmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanargmax.html">mxnet.np.nanargmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanargmin.html">mxnet.np.nanargmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.argwhere.html">mxnet.np.argwhere</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nonzero.html">mxnet.np.nonzero</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.flatnonzero.html">mxnet.np.flatnonzero</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.where.html">mxnet.np.where</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.searchsorted.html">mxnet.np.searchsorted</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.extract.html">mxnet.np.extract</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.count_nonzero.html">mxnet.np.count_nonzero</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/np/routines.statistics.html">Statistics</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.min.html">mxnet.np.min</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.max.html">mxnet.np.max</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.amin.html">mxnet.np.amin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.amax.html">mxnet.np.amax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanmin.html">mxnet.np.nanmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanmax.html">mxnet.np.nanmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.ptp.html">mxnet.np.ptp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.percentile.html">mxnet.np.percentile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanpercentile.html">mxnet.np.nanpercentile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.quantile.html">mxnet.np.quantile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanquantile.html">mxnet.np.nanquantile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.mean.html">mxnet.np.mean</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.std.html">mxnet.np.std</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.var.html">mxnet.np.var</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.median.html">mxnet.np.median</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.average.html">mxnet.np.average</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanmedian.html">mxnet.np.nanmedian</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanstd.html">mxnet.np.nanstd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.nanvar.html">mxnet.np.nanvar</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.corrcoef.html">mxnet.np.corrcoef</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.correlate.html">mxnet.np.correlate</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.cov.html">mxnet.np.cov</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.histogram.html">mxnet.np.histogram</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.histogram2d.html">mxnet.np.histogram2d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.histogramdd.html">mxnet.np.histogramdd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.bincount.html">mxnet.np.bincount</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.histogram_bin_edges.html">mxnet.np.histogram_bin_edges</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/np/generated/mxnet.np.digitize.html">mxnet.np.digitize</a></li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/npx/index.html">NPX: NumPy Neural Network Extension</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.set_np.html">mxnet.npx.set_np</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.reset_np.html">mxnet.npx.reset_np</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.cpu.html">mxnet.npx.cpu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.cpu_pinned.html">mxnet.npx.cpu_pinned</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.gpu.html">mxnet.npx.gpu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.gpu_memory_info.html">mxnet.npx.gpu_memory_info</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.current_device.html">mxnet.npx.current_device</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.num_gpus.html">mxnet.npx.num_gpus</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.activation.html">mxnet.npx.activation</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.batch_norm.html">mxnet.npx.batch_norm</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.convolution.html">mxnet.npx.convolution</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.dropout.html">mxnet.npx.dropout</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.embedding.html">mxnet.npx.embedding</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.fully_connected.html">mxnet.npx.fully_connected</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.layer_norm.html">mxnet.npx.layer_norm</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.pooling.html">mxnet.npx.pooling</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.rnn.html">mxnet.npx.rnn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.leaky_relu.html">mxnet.npx.leaky_relu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.multibox_detection.html">mxnet.npx.multibox_detection</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.multibox_prior.html">mxnet.npx.multibox_prior</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.multibox_target.html">mxnet.npx.multibox_target</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.roi_pooling.html">mxnet.npx.roi_pooling</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.sigmoid.html">mxnet.npx.sigmoid</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.relu.html">mxnet.npx.relu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.smooth_l1.html">mxnet.npx.smooth_l1</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.softmax.html">mxnet.npx.softmax</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.log_softmax.html">mxnet.npx.log_softmax</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.topk.html">mxnet.npx.topk</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.waitall.html">mxnet.npx.waitall</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.load.html">mxnet.npx.load</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.save.html">mxnet.npx.save</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.one_hot.html">mxnet.npx.one_hot</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.pick.html">mxnet.npx.pick</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.reshape_like.html">mxnet.npx.reshape_like</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.batch_flatten.html">mxnet.npx.batch_flatten</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.batch_dot.html">mxnet.npx.batch_dot</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.gamma.html">mxnet.npx.gamma</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/npx/generated/mxnet.npx.sequence_mask.html">mxnet.npx.sequence_mask</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/gluon/index.html">mxnet.gluon</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/block.html">gluon.Block</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/hybrid_block.html">gluon.HybridBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/symbol_block.html">gluon.SymbolBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/constant.html">gluon.Constant</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/parameter.html">gluon.Parameter</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/trainer.html">gluon.Trainer</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/contrib/index.html">gluon.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/data/index.html">gluon.data</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/gluon/data/vision/index.html">data.vision</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/gluon/data/vision/datasets/index.html">vision.datasets</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../../api/gluon/data/vision/transforms/index.html">vision.transforms</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/loss/index.html">gluon.loss</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/metric/index.html">gluon.metric</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/model_zoo/index.html">gluon.model_zoo.vision</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/nn/index.html">gluon.nn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/rnn/index.html">gluon.rnn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/gluon/utils/index.html">gluon.utils</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/autograd/index.html">mxnet.autograd</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/initializer/index.html">mxnet.initializer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/optimizer/index.html">mxnet.optimizer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/lr_scheduler/index.html">mxnet.lr_scheduler</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/kvstore/index.html">KVStore: Communication for Distributed Training</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/kvstore/index.html#horovod">Horovod</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/kvstore/generated/mxnet.kvstore.Horovod.html">mxnet.kvstore.Horovod</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/kvstore/index.html#byteps">BytePS</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/kvstore/generated/mxnet.kvstore.BytePS.html">mxnet.kvstore.BytePS</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/kvstore/index.html#kvstore-interface">KVStore Interface</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/kvstore/generated/mxnet.kvstore.KVStore.html">mxnet.kvstore.KVStore</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/kvstore/generated/mxnet.kvstore.KVStoreBase.html">mxnet.kvstore.KVStoreBase</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/kvstore/generated/mxnet.kvstore.KVStoreServer.html">mxnet.kvstore.KVStoreServer</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/contrib/index.html">mxnet.contrib</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/io/index.html">contrib.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/ndarray/index.html">contrib.ndarray</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/onnx/index.html">contrib.onnx</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/quantization/index.html">contrib.quantization</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/symbol/index.html">contrib.symbol</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/tensorboard/index.html">contrib.tensorboard</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/tensorrt/index.html">contrib.tensorrt</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/contrib/text/index.html">contrib.text</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/legacy/index.html">Legacy</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/callback/index.html">mxnet.callback</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/image/index.html">mxnet.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/io/index.html">mxnet.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/ndarray/index.html">mxnet.ndarray</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/ndarray.html">ndarray</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/contrib/index.html">ndarray.contrib</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/image/index.html">ndarray.image</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/linalg/index.html">ndarray.linalg</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/op/index.html">ndarray.op</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/random/index.html">ndarray.random</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/register/index.html">ndarray.register</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/sparse/index.html">ndarray.sparse</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/ndarray/utils/index.html">ndarray.utils</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/recordio/index.html">mxnet.recordio</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/symbol/index.html">mxnet.symbol</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/symbol.html">symbol</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/contrib/index.html">symbol.contrib</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/image/index.html">symbol.image</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/linalg/index.html">symbol.linalg</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/op/index.html">symbol.op</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/random/index.html">symbol.random</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/register/index.html">symbol.register</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../../api/legacy/symbol/sparse/index.html">symbol.sparse</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/legacy/visualization/index.html">mxnet.visualization</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/device/index.html">mxnet.device</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/engine/index.html">mxnet.engine</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/executor/index.html">mxnet.executor</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/kvstore_server/index.html">mxnet.kvstore_server</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/profiler/index.html">mxnet.profiler</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/rtc/index.html">mxnet.rtc</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/runtime/index.html">mxnet.runtime</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/runtime/generated/mxnet.runtime.Feature.html">mxnet.runtime.Feature</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/runtime/generated/mxnet.runtime.Features.html">mxnet.runtime.Features</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../../api/runtime/generated/mxnet.runtime.feature_list.html">mxnet.runtime.feature_list</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/test_utils/index.html">mxnet.test_utils</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../../api/util/index.html">mxnet.util</a></li>
</ul>
</li>
</ul>
</nav>
</div>
</header>
<div class="document">
<div class="page-content" role="main">
<style>
/* CSS for nbsphinx extension */
/* remove conflicting styling from Sphinx themes */
div.nbinput,
div.nbinput div.prompt,
div.nbinput div.input_area,
div.nbinput div[class*=highlight],
div.nbinput div[class*=highlight] pre,
div.nboutput,
div.nbinput div.prompt,
div.nbinput div.output_area,
div.nboutput div[class*=highlight],
div.nboutput div[class*=highlight] pre {
background: none;
border: none;
padding: 0 0;
margin: 0;
box-shadow: none;
}
/* avoid gaps between output lines */
div.nboutput div[class*=highlight] pre {
line-height: normal;
}
/* input/output containers */
div.nbinput,
div.nboutput {
display: -webkit-flex;
display: flex;
align-items: flex-start;
margin: 0;
width: 100%;
}
@media (max-width: 540px) {
div.nbinput,
div.nboutput {
flex-direction: column;
}
}
/* input container */
div.nbinput {
padding-top: 5px;
}
/* last container */
div.nblast {
padding-bottom: 5px;
}
/* input prompt */
div.nbinput div.prompt pre {
color: #307FC1;
}
/* output prompt */
div.nboutput div.prompt pre {
color: #BF5B3D;
}
/* all prompts */
div.nbinput div.prompt,
div.nboutput div.prompt {
min-width: 7ex;
padding-top: 0.4em;
padding-right: 0.4em;
text-align: right;
flex: 0;
}
@media (max-width: 540px) {
div.nbinput div.prompt,
div.nboutput div.prompt {
text-align: left;
padding: 0.4em;
}
div.nboutput div.prompt.empty {
padding: 0;
}
}
/* disable scrollbars on prompts */
div.nbinput div.prompt pre,
div.nboutput div.prompt pre {
overflow: hidden;
}
/* input/output area */
div.nbinput div.input_area,
div.nboutput div.output_area {
padding: 0.4em;
-webkit-flex: 1;
flex: 1;
overflow: auto;
}
@media (max-width: 540px) {
div.nbinput div.input_area,
div.nboutput div.output_area {
width: 100%;
}
}
/* input area */
div.nbinput div.input_area {
border: 1px solid #e0e0e0;
border-radius: 2px;
background: #f5f5f5;
}
/* override MathJax center alignment in output cells */
div.nboutput div[class*=MathJax] {
text-align: left !important;
}
/* override sphinx.ext.imgmath center alignment in output cells */
div.nboutput div.math p {
text-align: left;
}
/* standard error */
div.nboutput div.output_area.stderr {
background: #fdd;
}
/* ANSI colors */
.ansi-black-fg { color: #3E424D; }
.ansi-black-bg { background-color: #3E424D; }
.ansi-black-intense-fg { color: #282C36; }
.ansi-black-intense-bg { background-color: #282C36; }
.ansi-red-fg { color: #E75C58; }
.ansi-red-bg { background-color: #E75C58; }
.ansi-red-intense-fg { color: #B22B31; }
.ansi-red-intense-bg { background-color: #B22B31; }
.ansi-green-fg { color: #00A250; }
.ansi-green-bg { background-color: #00A250; }
.ansi-green-intense-fg { color: #007427; }
.ansi-green-intense-bg { background-color: #007427; }
.ansi-yellow-fg { color: #DDB62B; }
.ansi-yellow-bg { background-color: #DDB62B; }
.ansi-yellow-intense-fg { color: #B27D12; }
.ansi-yellow-intense-bg { background-color: #B27D12; }
.ansi-blue-fg { color: #208FFB; }
.ansi-blue-bg { background-color: #208FFB; }
.ansi-blue-intense-fg { color: #0065CA; }
.ansi-blue-intense-bg { background-color: #0065CA; }
.ansi-magenta-fg { color: #D160C4; }
.ansi-magenta-bg { background-color: #D160C4; }
.ansi-magenta-intense-fg { color: #A03196; }
.ansi-magenta-intense-bg { background-color: #A03196; }
.ansi-cyan-fg { color: #60C6C8; }
.ansi-cyan-bg { background-color: #60C6C8; }
.ansi-cyan-intense-fg { color: #258F8F; }
.ansi-cyan-intense-bg { background-color: #258F8F; }
.ansi-white-fg { color: #C5C1B4; }
.ansi-white-bg { background-color: #C5C1B4; }
.ansi-white-intense-fg { color: #A1A6B2; }
.ansi-white-intense-bg { background-color: #A1A6B2; }
.ansi-default-inverse-fg { color: #FFFFFF; }
.ansi-default-inverse-bg { background-color: #000000; }
.ansi-bold { font-weight: bold; }
.ansi-underline { text-decoration: underline; }
/* Some additional styling taken form the Jupyter notebook CSS */
div.rendered_html table {
border: none;
border-collapse: collapse;
border-spacing: 0;
color: black;
font-size: 12px;
table-layout: fixed;
}
div.rendered_html thead {
border-bottom: 1px solid black;
vertical-align: bottom;
}
div.rendered_html tr,
div.rendered_html th,
div.rendered_html td {
text-align: right;
vertical-align: middle;
padding: 0.5em 0.5em;
line-height: normal;
white-space: normal;
max-width: none;
border: none;
}
div.rendered_html th {
font-weight: bold;
}
div.rendered_html tbody tr:nth-child(odd) {
background: #f5f5f5;
}
div.rendered_html tbody tr:hover {
background: rgba(66, 165, 245, 0.2);
}
</style>
<!--- Licensed to the Apache Software Foundation (ASF) under one --><!--- or more contributor license agreements. See the NOTICE file --><!--- distributed with this work for additional information --><!--- regarding copyright ownership. The ASF licenses this file --><!--- to you under the Apache License, Version 2.0 (the --><!--- "License"); you may not use this file except in compliance --><!--- with the License. You may obtain a copy of the License at --><!--- http://www.apache.org/licenses/LICENSE-2.0 --><!--- Unless required by applicable law or agreed to in writing, --><!--- software distributed under the License is distributed on an --><!--- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY --><!--- KIND, either express or implied. See the License for the --><!--- specific language governing permissions and limitations --><!--- under the License. --><div class="section" id="Loss-functions">
<h1>Loss functions<a class="headerlink" href="#Loss-functions" title="Permalink to this headline"></a></h1>
<p>Loss functions are a critical component of training neural networks: a loss function is a quantitative measure of how far the predictions of the network are from the ground truth labels. Given this score, a network can improve by iteratively updating its weights to minimise the loss. Some tasks use a combination of multiple loss functions, but often you’ll just use one.
MXNet Gluon provides a number of the most commonly used loss functions, and which one you choose depends on your network and task. Some common task and loss function pairs include:</p>
<ul class="simple">
<li><p>Regression: <a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.L1Loss"><span class="std std-ref">L1Loss</span></a>, <a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.L2Loss"><span class="std std-ref">L2Loss</span></a></p></li>
<li><p>Classification: <a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.SigmoidBinaryCrossEntropyLoss"><span class="std std-ref">SigmoidBinaryCrossEntropyLoss</span></a>, <a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.SoftmaxCrossEntropyLoss"><span class="std std-ref">SoftmaxCrossEntropyLoss</span></a></p></li>
<li><p>Embeddings: <a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.HingeLoss"><span class="std std-ref">HingeLoss</span></a></p></li>
</ul>
<p>We’ll first import the modules; the <code class="docutils literal notranslate"><span class="pre">mxnet.gluon.loss</span></code> module is imported as <code class="docutils literal notranslate"><span class="pre">gloss</span></code> to avoid clashing with the commonly used variable name <code class="docutils literal notranslate"><span class="pre">loss</span></code>.</p>
<div class="nbinput nblast docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[1]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="kn">from</span> <span class="nn">IPython</span> <span class="kn">import</span> <span class="n">display</span>
<span class="kn">from</span> <span class="nn">matplotlib</span> <span class="kn">import</span> <span class="n">pyplot</span> <span class="k">as</span> <span class="n">plt</span>
<span class="kn">import</span> <span class="nn">mxnet</span> <span class="k">as</span> <span class="nn">mx</span>
<span class="kn">from</span> <span class="nn">mxnet</span> <span class="kn">import</span> <span class="n">np</span><span class="p">,</span> <span class="n">npx</span><span class="p">,</span> <span class="n">autograd</span>
<span class="kn">from</span> <span class="nn">mxnet.gluon</span> <span class="kn">import</span> <span class="n">nn</span><span class="p">,</span> <span class="n">loss</span> <span class="k">as</span> <span class="n">gloss</span>
</pre></div>
</div>
</div>
<div class="section" id="Basic-Usages">
<h2>Basic Usages<a class="headerlink" href="#Basic-Usages" title="Permalink to this headline"></a></h2>
<p>Now let’s create an instance of the <span class="math notranslate nohighlight">\(\ell_2\)</span> loss, which is commonly used in regression tasks.</p>
<div class="nbinput nblast docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[2]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">loss</span> <span class="o">=</span> <span class="n">gloss</span><span class="o">.</span><span class="n">L2Loss</span><span class="p">()</span>
</pre></div>
</div>
</div>
<p>And then feed two inputs to compute the elementwise loss values.</p>
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[3]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">x</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">((</span><span class="mi">2</span><span class="p">,))</span>
<span class="n">y</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">((</span><span class="mi">2</span><span class="p">,))</span> <span class="o">*</span> <span class="mi">2</span>
<span class="n">loss</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">)</span>
</pre></div>
</div>
</div>
<div class="nboutput docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area stderr docutils container">
<div class="highlight"><pre>
[04:48:36] /work/mxnet/src/storage/storage.cc:202: Using Pooled (Naive) StorageManager for CPU
</pre></div></div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[3]:
</pre></div>
</div>
<div class="output_area highlight-none notranslate"><div class="highlight"><pre>
<span></span>array([0.5, 0.5])
</pre></div>
</div>
</div>
<p>These values should match the mathematical definition: <span class="math notranslate nohighlight">\(0.5\|x-y\|^2\)</span>.</p>
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[4]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="mf">.5</span> <span class="o">*</span> <span class="p">(</span><span class="n">x</span> <span class="o">-</span> <span class="n">y</span><span class="p">)</span><span class="o">**</span><span class="mi">2</span>
</pre></div>
</div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[4]:
</pre></div>
</div>
<div class="output_area highlight-none notranslate"><div class="highlight"><pre>
<span></span>array([0.5, 0.5])
</pre></div>
</div>
</div>
<p>Next we show how to use a loss function to compute gradients.</p>
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[5]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">X</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">random</span><span class="o">.</span><span class="n">uniform</span><span class="p">(</span><span class="n">size</span><span class="o">=</span><span class="p">(</span><span class="mi">2</span><span class="p">,</span> <span class="mi">4</span><span class="p">))</span>
<span class="n">net</span> <span class="o">=</span> <span class="n">nn</span><span class="o">.</span><span class="n">Dense</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
<span class="n">net</span><span class="o">.</span><span class="n">initialize</span><span class="p">()</span>
<span class="k">with</span> <span class="n">autograd</span><span class="o">.</span><span class="n">record</span><span class="p">():</span>
<span class="n">l</span> <span class="o">=</span> <span class="n">loss</span><span class="p">(</span><span class="n">net</span><span class="p">(</span><span class="n">X</span><span class="p">),</span> <span class="n">y</span><span class="p">)</span>
<span class="nb">print</span><span class="p">(</span><span class="n">l</span><span class="p">)</span>
</pre></div>
</div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area docutils container">
<div class="highlight"><pre>
[2.0564232 2.0480375]
</pre></div></div>
</div>
<p>We can then compute the gradients of the loss w.r.t. the network parameters.</p>
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[6]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">l</span><span class="o">.</span><span class="n">backward</span><span class="p">()</span>
<span class="nb">print</span><span class="p">(</span><span class="n">net</span><span class="o">.</span><span class="n">weight</span><span class="o">.</span><span class="n">grad</span><span class="p">())</span>
</pre></div>
</div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area docutils container">
<div class="highlight"><pre>
[[-2.0926476 -1.6370986 -1.6798826 -2.2344642]]
</pre></div></div>
</div>
</div>
<div class="section" id="Loss-functions">
<h2>Loss functions<a class="headerlink" href="#Loss-functions" title="Permalink to this headline"></a></h2>
<p>Most commonly used loss functions can be divided into 2 categories: regression and classification.</p>
<p>Let’s first visualize several regression losses. We visualize the loss values versus the predicted values with label values fixed to be 0.</p>
<div class="nbinput nblast docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[7]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="k">def</span> <span class="nf">plot</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">):</span>
<span class="n">display</span><span class="o">.</span><span class="n">set_matplotlib_formats</span><span class="p">(</span><span class="s1">&#39;svg&#39;</span><span class="p">)</span>
<span class="n">plt</span><span class="o">.</span><span class="n">plot</span><span class="p">(</span><span class="n">x</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">(),</span> <span class="n">y</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">())</span>
<span class="n">plt</span><span class="o">.</span><span class="n">xlabel</span><span class="p">(</span><span class="s1">&#39;x&#39;</span><span class="p">)</span>
<span class="n">plt</span><span class="o">.</span><span class="n">ylabel</span><span class="p">(</span><span class="s1">&#39;loss&#39;</span><span class="p">)</span>
<span class="n">plt</span><span class="o">.</span><span class="n">show</span><span class="p">()</span>
<span class="k">def</span> <span class="nf">show_regression_loss</span><span class="p">(</span><span class="n">loss</span><span class="p">):</span>
<span class="n">x</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">arange</span><span class="p">(</span><span class="o">-</span><span class="mi">5</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="mf">.1</span><span class="p">)</span>
<span class="n">y</span> <span class="o">=</span> <span class="n">loss</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">zeros_like</span><span class="p">(</span><span class="n">x</span><span class="p">))</span>
<span class="n">plot</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">)</span>
</pre></div>
</div>
</div>
<p>Then plot the classification losses with label values fixed to be 1.</p>
<div class="nbinput nblast docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[8]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="k">def</span> <span class="nf">show_classification_loss</span><span class="p">(</span><span class="n">loss</span><span class="p">):</span>
<span class="n">x</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">arange</span><span class="p">(</span><span class="o">-</span><span class="mi">5</span><span class="p">,</span> <span class="mi">5</span><span class="p">,</span> <span class="mf">.1</span><span class="p">)</span>
<span class="n">y</span> <span class="o">=</span> <span class="n">loss</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">ones_like</span><span class="p">(</span><span class="n">x</span><span class="p">))</span>
<span class="n">plot</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">)</span>
</pre></div>
</div>
</div>
<div class="section" id="L1-Loss">
<h3><a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.L1Loss"><span class="std std-ref">L1 Loss</span></a><a class="headerlink" href="#L1-Loss" title="Permalink to this headline"></a></h3>
<p>L1 Loss, also called Mean Absolute Error, computes the sum of the absolute differences between the target values and the output of the neural network. It is defined as:</p>
<div class="math notranslate nohighlight">
\[L = \sum_i \vert {label}_i - {pred}_i \vert.\]</div>
<p>It is a non-smooth function that can lead to non-convergence. It creates the same gradient for small and large loss values, which can be problematic for the learning process.</p>
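<p>As a quick sanity check, we can evaluate the loss on a small example by hand. This is a minimal sketch reusing the modules imported above; for these one-dimensional inputs the result is simply the elementwise absolute difference:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from mxnet import np, npx
from mxnet.gluon import loss as gloss
npx.set_np()

pred  = np.array([0.5, -2.0, 3.0])
label = np.zeros_like(pred)

# L1Loss returns one loss value per sample: |label - pred|
print(gloss.L1Loss()(pred, label))   # expect roughly [0.5, 2.0, 3.0]
print(np.absolute(label - pred))     # manual computation for comparison
</pre></div></div>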
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[9]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">show_regression_loss</span><span class="p">(</span><span class="n">gloss</span><span class="o">.</span><span class="n">L1Loss</span><span class="p">())</span>
</pre></div>
</div>
</div>
<div class="nboutput docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area stderr docutils container">
<div class="highlight"><pre>
/tmp/ipykernel_191919/841998161.py:2: DeprecationWarning: `set_matplotlib_formats` is deprecated since IPython 7.23, directly use `matplotlib_inline.backend_inline.set_matplotlib_formats()`
display.set_matplotlib_formats(&#39;svg&#39;)
</pre></div></div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area docutils container">
<img alt="../../../../_images/tutorials_packages_gluon_loss_loss_17_1.svg" src="../../../../_images/tutorials_packages_gluon_loss_loss_17_1.svg" /></div>
</div>
</div>
<div class="section" id="L2-Loss">
<h3><a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.L2Loss"><span class="std std-ref">L2 Loss</span></a><a class="headerlink" href="#L2-Loss" title="Permalink to this headline"></a></h3>
<p>L2Loss, also called Mean Squared Error, is a regression loss function that computes the squared distances between the target values and the output of the neural network. It is defined as:</p>
<div class="math notranslate nohighlight">
\[L = \frac{1}{2} \sum_i \vert {label}_i - {pred}_i \vert^2.\]</div>
<p>Compared to L1 loss, L2 loss is a smooth function and it creates larger gradients for larger loss values. However, due to the squaring it puts a high weight on outliers.</p>
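<p>The different gradient behaviour of the two losses can be checked directly with <code class="docutils literal notranslate"><span class="pre">autograd</span></code>. In this minimal sketch the L1 gradient has the same magnitude for every error, while the L2 gradient grows with the error:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from mxnet import np, npx, autograd
from mxnet.gluon import loss as gloss
npx.set_np()

pred  = np.array([0.5, 2.0, 8.0])
label = np.zeros_like(pred)
pred.attach_grad()

# L1: the gradient is the sign of the error, identical for small and large errors
with autograd.record():
    l = gloss.L1Loss()(pred, label)
l.backward()
print(pred.grad)   # expect roughly [1., 1., 1.]

# L2: the gradient scales with the error itself
with autograd.record():
    l = gloss.L2Loss()(pred, label)
l.backward()
print(pred.grad)   # expect roughly [0.5, 2., 8.]
</pre></div></div>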
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[10]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">show_regression_loss</span><span class="p">(</span><span class="n">gloss</span><span class="o">.</span><span class="n">L2Loss</span><span class="p">())</span>
</pre></div>
</div>
</div>
<div class="nboutput docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area stderr docutils container">
<div class="highlight"><pre>
/tmp/ipykernel_191919/841998161.py:2: DeprecationWarning: `set_matplotlib_formats` is deprecated since IPython 7.23, directly use `matplotlib_inline.backend_inline.set_matplotlib_formats()`
display.set_matplotlib_formats(&#39;svg&#39;)
</pre></div></div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area docutils container">
<img alt="../../../../_images/tutorials_packages_gluon_loss_loss_19_1.svg" src="../../../../_images/tutorials_packages_gluon_loss_loss_19_1.svg" /></div>
</div>
</div>
<div class="section" id="Huber-Loss">
<h3><a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.HuberLoss"><span class="std std-ref">Huber Loss</span></a><a class="headerlink" href="#Huber-Loss" title="Permalink to this headline"></a></h3>
<p>HuberLoss combines advantages of L1 and L2 loss. It calculates a smoothed L1 loss that is equal to L1 if the absolute error exceeds a threshold <span class="math notranslate nohighlight">\(\rho\)</span>, otherwise it is equal to L2. It is defined as:</p>
<div class="math notranslate nohighlight">
\[\begin{split}\begin{split}L = \sum_i \begin{cases} \frac{1}{2 \rho} ({label}_i - {pred}_i)^2 &amp;
\text{ if } |{label}_i - {pred}_i| &lt; \rho \\
|{label}_i - {pred}_i| - \frac{\rho}{2} &amp;
\text{ otherwise }
\end{cases}\end{split}\end{split}\]</div>
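<p>For example, with <code class="docutils literal notranslate"><span class="pre">rho=1</span></code> an absolute error of 0.5 falls into the quadratic branch and gives 0.125, while an error of 3 falls into the linear branch and gives 2.5. A minimal sketch checking this with the modules imported above:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from mxnet import np, npx
from mxnet.gluon import loss as gloss
npx.set_np()

huber = gloss.HuberLoss(rho=1)
pred  = np.array([0.5, 3.0])
label = np.zeros_like(pred)
print(huber(pred, label))   # expect roughly [0.125, 2.5]
</pre></div></div>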
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[11]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">show_regression_loss</span><span class="p">(</span><span class="n">gloss</span><span class="o">.</span><span class="n">HuberLoss</span><span class="p">(</span><span class="n">rho</span><span class="o">=</span><span class="mi">1</span><span class="p">))</span>
</pre></div>
</div>
</div>
<div class="nboutput docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area stderr docutils container">
<div class="highlight"><pre>
/tmp/ipykernel_191919/841998161.py:2: DeprecationWarning: `set_matplotlib_formats` is deprecated since IPython 7.23, directly use `matplotlib_inline.backend_inline.set_matplotlib_formats()`
display.set_matplotlib_formats(&#39;svg&#39;)
</pre></div></div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area docutils container">
<img alt="../../../../_images/tutorials_packages_gluon_loss_loss_21_1.svg" src="../../../../_images/tutorials_packages_gluon_loss_loss_21_1.svg" /></div>
</div>
<p>An example of where Huber Loss is used can be found in <a class="reference external" href="https://openai.com/blog/openai-baselines-dqn/">Deep Q Network</a>.</p>
</div>
<div class="section" id="Cross-Entropy-Loss-with-Sigmoid">
<h3><a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.SigmoidBinaryCrossEntropyLoss"><span class="std std-ref">Cross Entropy Loss with Sigmoid</span></a><a class="headerlink" href="#Cross-Entropy-Loss-with-Sigmoid" title="Permalink to this headline"></a></h3>
<p>Binary Cross Entropy is a loss function used for binary classification problems, e.g. classifying images into 2 classes. Cross entropy measures the difference between two probability distributions and is defined as:</p>
<div class="math notranslate nohighlight">
\[\sum_i -{(y\log(p) + (1 - y)\log(1 - p))}\]</div>
<p>Before the loss is computed, a sigmoid activation is applied to the predictions by default. If your network already has a <code class="docutils literal notranslate"><span class="pre">sigmoid</span></code> activation as its last layer, then you need to set <code class="docutils literal notranslate"><span class="pre">from_sigmoid</span></code> to <code class="docutils literal notranslate"><span class="pre">True</span></code> to avoid applying the sigmoid function twice.</p>
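<p>The two configurations should agree when used consistently. In this minimal sketch the loss is first computed from raw scores with the default settings, and then from already-sigmoided outputs with <code class="docutils literal notranslate"><span class="pre">from_sigmoid=True</span></code>; both calls should return (numerically almost) identical values:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
from mxnet import np, npx
from mxnet.gluon import loss as gloss
npx.set_np()

logits = np.array([-2.0, 0.5, 3.0])
labels = np.array([0.0, 1.0, 1.0])

# default: raw scores go in and the sigmoid is applied internally (numerically stable)
bce_from_logits = gloss.SigmoidBinaryCrossEntropyLoss()
# the network already ends with a sigmoid: feed probabilities and skip the internal sigmoid
bce_from_probs  = gloss.SigmoidBinaryCrossEntropyLoss(from_sigmoid=True)

print(bce_from_logits(logits, labels))
print(bce_from_probs(npx.sigmoid(logits), labels))
</pre></div></div>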
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[12]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">show_classification_loss</span><span class="p">(</span><span class="n">gloss</span><span class="o">.</span><span class="n">SigmoidBinaryCrossEntropyLoss</span><span class="p">())</span>
</pre></div>
</div>
</div>
<div class="nboutput docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area stderr docutils container">
<div class="highlight"><pre>
/tmp/ipykernel_191919/841998161.py:2: DeprecationWarning: `set_matplotlib_formats` is deprecated since IPython 7.23, directly use `matplotlib_inline.backend_inline.set_matplotlib_formats()`
display.set_matplotlib_formats(&#39;svg&#39;)
</pre></div></div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area docutils container">
<img alt="../../../../_images/tutorials_packages_gluon_loss_loss_23_1.svg" src="../../../../_images/tutorials_packages_gluon_loss_loss_23_1.svg" /></div>
</div>
</div>
<div class="section" id="Cross-Entropy-Loss-with-Softmax">
<h3><a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.SoftmaxCrossEntropyLoss"><span class="std std-ref">Cross Entropy Loss with Softmax</span></a><a class="headerlink" href="#Cross-Entropy-Loss-with-Softmax" title="Permalink to this headline"></a></h3>
<p>In classification, we often apply the softmax operator to the predicted outputs to obtain prediction probabilities, and then apply the cross entropy loss against the true labels:</p>
<div class="math notranslate nohighlight">
\[\begin{split}\begin{align}\begin{aligned}p = \text{softmax}({pred})\\L = -\sum_i \sum_j {label}_j \log p_{ij}\end{aligned}\end{align}\end{split}\]</div>
<p>Running these two steps one by one, however, may lead to numerical instabilities. The <code class="docutils literal notranslate"><span class="pre">loss</span></code> module provides a single operator with softmax and cross entropy fused to avoid such problems.</p>
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[13]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">loss</span> <span class="o">=</span> <span class="n">gloss</span><span class="o">.</span><span class="n">SoftmaxCrossEntropyLoss</span><span class="p">()</span>
<span class="n">x</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mi">1</span><span class="p">,</span> <span class="mi">10</span><span class="p">],</span> <span class="p">[</span><span class="mi">8</span><span class="p">,</span> <span class="mi">2</span><span class="p">]])</span>
<span class="n">y</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">0</span><span class="p">,</span> <span class="mi">1</span><span class="p">])</span>
<span class="n">loss</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">)</span>
</pre></div>
</div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[13]:
</pre></div>
</div>
<div class="output_area highlight-none notranslate"><div class="highlight"><pre>
<span></span>array([9.000123 , 6.0024757])
</pre></div>
</div>
</div>
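<p>As a quick sanity check (a sketch using classic NumPy, imported here as <code class="docutils literal notranslate"><span class="pre">onp</span></code>, with the same arrays as the cell above), the fused loss equals the two-step computation for moderate scores, while the naive two-step version breaks down for very large scores and the log-sum-exp form stays finite:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
import numpy as onp  # classic NumPy, used only for this illustration

logits = onp.array([[1., 10.], [8., 2.]])
labels = onp.array([0, 1])

# Naive two-step computation: softmax first, then the negative log of the picked probability.
probs = onp.exp(logits) / onp.exp(logits).sum(axis=1, keepdims=True)
print(-onp.log(probs[onp.arange(2), labels]))   # roughly [9.000123, 6.0024757], matches the fused loss

# With very large scores the naive version overflows and returns inf ...
big = onp.array([[0., 1000.]])
big_probs = onp.exp(big) / onp.exp(big).sum(axis=1, keepdims=True)
print(-onp.log(big_probs[0, 0]))                # inf, plus runtime warnings

# ... while the log-sum-exp form stays finite.
log_probs = big - big.max(axis=1, keepdims=True)
log_probs -= onp.log(onp.exp(log_probs).sum(axis=1, keepdims=True))
print(-log_probs[0, 0])                         # 1000.0
</pre></div>
</div>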
</div>
<div class="section" id="Hinge-Loss">
<h3><a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.HingeLoss"><span class="std std-ref">Hinge Loss</span></a><a class="headerlink" href="#Hinge-Loss" title="Permalink to this headline"></a></h3>
<p>Commonly used in Support Vector Machines (SVMs), Hinge Loss is used to additionally penalize predictions that are correct but fall within a margin between classes (the region around a decision boundary). Unlike <code class="docutils literal notranslate"><span class="pre">SoftmaxCrossEntropyLoss</span></code>, it’s rarely used for neural network training. It is defined as:</p>
<div class="math notranslate nohighlight">
\[L = \sum_i max(0, {margin} - {pred}_i \cdot {label}_i)\]</div>
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[14]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">show_classification_loss</span><span class="p">(</span><span class="n">gloss</span><span class="o">.</span><span class="n">HingeLoss</span><span class="p">())</span>
</pre></div>
</div>
</div>
<div class="nboutput docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area stderr docutils container">
<div class="highlight"><pre>
/tmp/ipykernel_191919/841998161.py:2: DeprecationWarning: `set_matplotlib_formats` is deprecated since IPython 7.23, directly use `matplotlib_inline.backend_inline.set_matplotlib_formats()`
display.set_matplotlib_formats(&#39;svg&#39;)
</pre></div></div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area docutils container">
<img alt="../../../../_images/tutorials_packages_gluon_loss_loss_27_1.svg" src="../../../../_images/tutorials_packages_gluon_loss_loss_27_1.svg" /></div>
</div>
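<p>As a small numeric illustration (the scores below are made up), labels are expected to be -1 or 1; a correct prediction that falls inside the margin still incurs a loss, while a confidently correct one does not:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
# Hypothetical scores and signed labels.
pred = np.array([0.3, 2.0, -0.5])
label = np.array([1, 1, 1])

loss = gloss.HingeLoss()      # margin defaults to 1
print(loss(pred, label))
# Expected (roughly): [0.7, 0.0, 1.5]
#   0.3 is correct but inside the margin:    1 - 0.3 = 0.7
#   2.0 is correct and outside the margin:   0
#  -0.5 is incorrect:                        1 + 0.5 = 1.5
</pre></div>
</div>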
</div>
<div class="section" id="Logistic-Loss">
<h3><a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.LogisticLoss"><span class="std std-ref">Logistic Loss</span></a><a class="headerlink" href="#Logistic-Loss" title="Permalink to this headline"></a></h3>
<p>The Logistic Loss function measures the performance of binary classification models; by default the labels are expected to be signed, i.e. -1 or 1.</p>
<div class="math notranslate nohighlight">
\[L = \sum_i \log(1 + \exp(- {pred}_i \cdot {label}_i))\]</div>
<p>The log loss decreases the closer the prediction is to the actual label. It is sensitive to outliers because incorrectly classified points are penalized more heavily.</p>
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[15]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">show_classification_loss</span><span class="p">(</span><span class="n">gloss</span><span class="o">.</span><span class="n">LogisticLoss</span><span class="p">())</span>
</pre></div>
</div>
</div>
<div class="nboutput docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area stderr docutils container">
<div class="highlight"><pre>
/tmp/ipykernel_191919/841998161.py:2: DeprecationWarning: `set_matplotlib_formats` is deprecated since IPython 7.23, directly use `matplotlib_inline.backend_inline.set_matplotlib_formats()`
display.set_matplotlib_formats(&#39;svg&#39;)
</pre></div></div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area docutils container">
<img alt="../../../../_images/tutorials_packages_gluon_loss_loss_29_1.svg" src="../../../../_images/tutorials_packages_gluon_loss_loss_29_1.svg" /></div>
</div>
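<p>As a small numeric illustration (with the same made-up scores and signed labels as in the hinge sketch above), the logistic loss is smooth and, unlike the hinge loss, keeps a small non-zero value even for confidently correct predictions:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
# Hypothetical scores and signed labels (same as in the hinge sketch above).
pred = np.array([0.3, 2.0, -0.5])
label = np.array([1, 1, 1])

loss = gloss.LogisticLoss()   # default label format is 'signed', i.e. labels are -1 or 1
print(loss(pred, label))
# Expected (roughly): [0.55, 0.13, 0.97], i.e. log(1 + exp(-pred * label)).
# Even the confidently correct prediction (2.0) keeps a small non-zero loss.
</pre></div>
</div>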
</div>
<div class="section" id="Kullback-Leibler-Divergence-Loss">
<h3><a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.KLDivLoss"><span class="std std-ref">Kullback-Leibler Divergence Loss</span></a><a class="headerlink" href="#Kullback-Leibler-Divergence-Loss" title="Permalink to this headline"></a></h3>
<p>The Kullback-Leibler divergence loss measures the divergence between two probability distributions by calculating the difference between cross entropy and entropy. It takes as input the predicted probability distribution and the true probability distribution.</p>
<div class="math notranslate nohighlight">
\[L = \sum_i {label}_i * \big[\log({label}_i) - {pred}_i\big]\]</div>
<p>The loss is large if the predicted probability distribution is far from the ground-truth probability distribution. KL divergence is an asymmetric measure. KL divergence loss is used, for example, in Variational Autoencoders (VAEs) and in reinforcement learning policy-optimization methods such as Trust Region Policy Optimization (TRPO).</p>
<p>For instance, in the following example we get a KL divergence of roughly 0.025. We set <code class="docutils literal notranslate"><span class="pre">from_logits=False</span></code>, so the loss function applies <code class="docutils literal notranslate"><span class="pre">log_softmax</span></code> to the network output before computing the KL divergence.</p>
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[16]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">output</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mf">0.39056206</span><span class="p">,</span> <span class="mf">1.3068528</span><span class="p">,</span> <span class="mf">0.39056206</span><span class="p">,</span> <span class="o">-</span><span class="mf">0.30258512</span><span class="p">]])</span>
<span class="nb">print</span><span class="p">(</span><span class="s1">&#39;output.softmax(): </span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">npx</span><span class="o">.</span><span class="n">softmax</span><span class="p">(</span><span class="n">output</span><span class="p">)</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span><span class="o">.</span><span class="n">tolist</span><span class="p">()))</span>
<span class="n">target_dist</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([[</span><span class="mf">0.3</span><span class="p">,</span> <span class="mf">0.4</span><span class="p">,</span> <span class="mf">0.1</span><span class="p">,</span> <span class="mf">0.2</span><span class="p">]])</span>
<span class="n">loss_fn</span> <span class="o">=</span> <span class="n">gloss</span><span class="o">.</span><span class="n">KLDivLoss</span><span class="p">(</span><span class="n">from_logits</span><span class="o">=</span><span class="kc">False</span><span class="p">)</span>
<span class="n">loss</span> <span class="o">=</span> <span class="n">loss_fn</span><span class="p">(</span><span class="n">output</span><span class="p">,</span> <span class="n">target_dist</span><span class="p">)</span>
<span class="nb">print</span><span class="p">(</span><span class="s1">&#39;loss (kl divergence): </span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">loss</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span><span class="o">.</span><span class="n">tolist</span><span class="p">()))</span>
</pre></div>
</div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area docutils container">
<div class="highlight"><pre>
output.softmax(): [[0.19999998807907104, 0.5, 0.19999998807907104, 0.09999999403953552]]
loss (kl divergence): [0.025424206629395485]
</pre></div></div>
</div>
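<p>The value above can be reproduced by hand. Note that the result is averaged over the class dimension, so it is the KL divergence divided by the number of classes. A sketch using classic NumPy with the (rounded) numbers from the cell above:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
import numpy as onp  # classic NumPy, used only for this check

q = onp.array([0.2, 0.5, 0.2, 0.1])   # softmax of the network output (printed above)
p = onp.array([0.3, 0.4, 0.1, 0.2])   # target distribution

kl = (p * onp.log(p / q)).sum()
print(kl)        # roughly 0.1017, the KL divergence
print(kl / 4)    # roughly 0.0254, matches the loss above (mean over the 4 classes)
</pre></div>
</div>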
</div>
<div class="section" id="Triplet-Loss">
<h3><a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.TripletLoss"><span class="std std-ref">Triplet Loss</span></a><a class="headerlink" href="#Triplet-Loss" title="Permalink to this headline"></a></h3>
<p>Triplet loss takes three input arrays and measures their relative similarity: an anchor, a positive example and a negative example.</p>
<div class="math notranslate nohighlight">
\[L = \sum_i \max(\Vert {pos}_i - {pred}_i \Vert_2^2 -
\Vert {neg}_i - {pred}_i \Vert_2^2 + {margin}, 0)\]</div>
<p>The loss function minimizes the distance between similar inputs and maximizes the distance between dissimilar ones. In the case of learning embeddings for images of characters, the network may get as input the following 3 images:</p>
<p><img alt="triplet_loss" src="../../../../_images/triplet_loss.png" /></p>
<p>The network would learn to minimize the distance between the two <code class="docutils literal notranslate"><span class="pre">A</span></code>’s and maximize the distance between <code class="docutils literal notranslate"><span class="pre">A</span></code> and <code class="docutils literal notranslate"><span class="pre">Z</span></code>.</p>
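<p>A minimal sketch with made-up two-dimensional embeddings (anchor, positive and negative, each with a batch dimension of one). The first call gives a non-zero loss because the negative is closer to the anchor than the positive; swapping the two roles drives the loss to zero:</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
anchor   = np.array([[1.0, 0.0]])   # hypothetical embedding of the anchor image
positive = np.array([[0.0, 1.0]])   # embedding of the same character (far from the anchor here)
negative = np.array([[1.0, 0.1]])   # embedding of a different character (close to the anchor)

loss = gloss.TripletLoss()          # margin defaults to 1
print(loss(anchor, positive, negative))   # non-zero loss: the triplet is in the wrong order
print(loss(anchor, negative, positive))   # roughly [0.]: the closer embedding is now the positive
</pre></div>
</div>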
</div>
<div class="section" id="CTC-Loss">
<h3><a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.CTCLoss"><span class="std std-ref">CTC Loss</span></a><a class="headerlink" href="#CTC-Loss" title="Permalink to this headline"></a></h3>
<p>CTC Loss is the <a class="reference external" href="https://distill.pub/2017/ctc/">connectionist temporal classification loss</a>. It is used to train recurrent neural networks on sequences with a variable time dimension: it learns both the alignment and the labelling of the input sequence. The network takes a sequence as input and outputs probabilities for each timestep. For instance, in the following image the word is not well aligned with the 5 timesteps because the characters have different sizes. CTC Loss finds, for each timestep, the most probable character,
e.g. <code class="docutils literal notranslate"><span class="pre">t1</span></code> most likely corresponds to a <code class="docutils literal notranslate"><span class="pre">C</span></code>, and combines these probabilities to return the best-path decoding.</p>
<p><img alt="ctc_loss" src="../../../../_images/ctc_loss.png" /></p>
</div>
<div class="section" id="Cosine-Embedding-Loss">
<h3><a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.CosineEmbeddingLoss"><span class="std std-ref">Cosine Embedding Loss</span></a><a class="headerlink" href="#Cosine-Embedding-Loss" title="Permalink to this headline"></a></h3>
<p>The cosine embedding loss measures how similar or dissimilar two input vectors are, based on their cosine similarity.</p>
<div class="math notranslate nohighlight">
\[\begin{split}\begin{split}L = \sum_i \begin{cases} 1 - {cos\_sim({input1}_i, {input2}_i)} &amp; \text{ if } {label}_i = 1\\
{cos\_sim({input1}_i, {input2}_i)} &amp; \text{ if } {label}_i = -1 \end{cases}\\
cos\_sim({input1}_i, {input2}_i) = \frac{{input1}_i \cdot {input2}_i}{\Vert {input1}_i \Vert \, \Vert {input2}_i \Vert}\end{split}\end{split}\]</div>
<p>Given a label, the loss measures the similarity or dissimilarity between two arrays and is typically used for learning nonlinear embeddings. For instance, in the following code example we measure the similarity between the input vectors <code class="docutils literal notranslate"><span class="pre">x</span></code> and <code class="docutils literal notranslate"><span class="pre">y</span></code>. Since they are identical, the label equals <code class="docutils literal notranslate"><span class="pre">1</span></code> and the loss function returns</p>
<div class="math notranslate nohighlight">
\[\sum_i 1 - {cos\_sim({input1}_i, {input2}_i)}\]</div>
<p>which equals <code class="docutils literal notranslate"><span class="pre">0</span></code>.</p>
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[17]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">x</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span><span class="mi">0</span><span class="p">,</span><span class="mi">1</span><span class="p">,</span><span class="mi">0</span><span class="p">,</span><span class="mi">1</span><span class="p">,</span><span class="mi">0</span><span class="p">])</span>
<span class="n">y</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span><span class="mi">0</span><span class="p">,</span><span class="mi">1</span><span class="p">,</span><span class="mi">0</span><span class="p">,</span><span class="mi">1</span><span class="p">,</span><span class="mi">0</span><span class="p">])</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">])</span>
<span class="n">loss</span> <span class="o">=</span> <span class="n">gloss</span><span class="o">.</span><span class="n">CosineEmbeddingLoss</span><span class="p">()</span>
<span class="nb">print</span><span class="p">(</span><span class="n">loss</span><span class="p">(</span><span class="n">x</span><span class="p">,</span><span class="n">y</span><span class="p">,</span><span class="n">label</span><span class="p">))</span>
</pre></div>
</div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area docutils container">
<div class="highlight"><pre>
[0.]
</pre></div></div>
</div>
<p>Now let’s make <code class="docutils literal notranslate"><span class="pre">y</span></code> dissimilar to <code class="docutils literal notranslate"><span class="pre">x</span></code> (the two vectors below are orthogonal, so their cosine similarity is 0). We set the label to <code class="docutils literal notranslate"><span class="pre">-1</span></code> and the function will return</p>
<div class="math notranslate nohighlight">
\[\sum_i cos\_sim(input1, input2)\]</div>
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[18]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">x</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span><span class="mi">0</span><span class="p">,</span><span class="mi">1</span><span class="p">,</span><span class="mi">0</span><span class="p">,</span><span class="mi">1</span><span class="p">,</span><span class="mi">0</span><span class="p">])</span>
<span class="n">y</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">0</span><span class="p">,</span><span class="mi">1</span><span class="p">,</span><span class="mi">0</span><span class="p">,</span><span class="mi">1</span><span class="p">,</span><span class="mi">0</span><span class="p">,</span><span class="mi">1</span><span class="p">])</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">mx</span><span class="o">.</span><span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="o">-</span><span class="mi">1</span><span class="p">])</span>
<span class="n">loss</span> <span class="o">=</span> <span class="n">gloss</span><span class="o">.</span><span class="n">CosineEmbeddingLoss</span><span class="p">()</span>
<span class="nb">print</span><span class="p">(</span><span class="n">loss</span><span class="p">(</span><span class="n">x</span><span class="p">,</span><span class="n">y</span><span class="p">,</span><span class="n">label</span><span class="p">))</span>
</pre></div>
</div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt empty docutils container">
</div>
<div class="output_area docutils container">
<div class="highlight"><pre>
[0.]
</pre></div></div>
</div>
</div>
<div class="section" id="PoissonNLLLoss">
<h3><a class="reference internal" href="../../../../api/gluon/loss/index.html#mxnet.gluon.loss.PoissonNLLLoss"><span class="std std-ref">PoissonNLLLoss</span></a><a class="headerlink" href="#PoissonNLLLoss" title="Permalink to this headline"></a></h3>
<p>The Poisson distribution is widely used for modelling count data. Its probability mass function is:</p>
<div class="math notranslate nohighlight">
\[f(x) = \frac{\mu ^ {\kern 0.08 em x} e ^ {-\mu}} {x!} \qquad \qquad x = 0,1,2 , \ldots \,.\]</div>
<p>For instance, the count of cars in road traffic approximately follows a Poisson distribution. Using an ordinary least squares model for Poisson-distributed data would not work well for two reasons:</p>
<ul class="simple">
<li><p>count data cannot be negative</p></li>
<li><p>the variance may not be constant</p></li>
</ul>
<p>Instead we can use a Poisson regression model, also known as a log-linear model, in which the Poisson incidence rate <span class="math notranslate nohighlight">\(\mu\)</span> is modelled by a linear combination of unknown parameters. We can then use the <code class="docutils literal notranslate"><span class="pre">PoissonNLLLoss</span></code>, which calculates the negative log likelihood for a target that follows a Poisson distribution.</p>
<div class="math notranslate nohighlight">
\[L = \text{pred} - \text{target} \cdot \log(\text{pred}) + \log(\text{target}!)\]</div>
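<p>A minimal sketch with made-up numbers (here the prediction is interpreted as the log of the Poisson rate, so <code class="docutils literal notranslate"><span class="pre">from_logits=True</span></code> is passed explicitly; the constant <code class="docutils literal notranslate"><span class="pre">log(target!)</span></code> term is only included when <code class="docutils literal notranslate"><span class="pre">compute_full=True</span></code>):</p>
<div class="highlight-python notranslate"><div class="highlight"><pre>
# Log of hypothetical Poisson rates (2 and 5) and the observed counts.
log_rate = np.log(np.array([[2.0], [5.0]]))
target   = np.array([[3.0], [4.0]])

loss = gloss.PoissonNLLLoss(from_logits=True)
print(loss(log_rate, target))   # negative log likelihood, up to the log(target!) term
</pre></div>
</div>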
</div>
</div>
<div class="section" id="Advanced:-Weighted-Loss">
<h2>Advanced: Weighted Loss<a class="headerlink" href="#Advanced:-Weighted-Loss" title="Permalink to this headline"></a></h2>
<p>Some examples in a batch may be more important than others. We can apply weights to individual examples during the forward pass of the loss function using the <code class="docutils literal notranslate"><span class="pre">sample_weight</span></code> argument. All examples are weighted equally by default.</p>
<div class="nbinput docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[19]:
</pre></div>
</div>
<div class="input_area highlight-python notranslate"><div class="highlight"><pre>
<span></span><span class="n">x</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">((</span><span class="mi">2</span><span class="p">,))</span>
<span class="n">y</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">ones</span><span class="p">((</span><span class="mi">2</span><span class="p">,))</span> <span class="o">*</span> <span class="mi">2</span>
<span class="n">loss</span> <span class="o">=</span> <span class="n">gloss</span><span class="o">.</span><span class="n">L2Loss</span><span class="p">()</span>
<span class="n">loss</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">([</span><span class="mi">1</span><span class="p">,</span> <span class="mi">2</span><span class="p">]))</span>
</pre></div>
</div>
</div>
<div class="nboutput nblast docutils container">
<div class="prompt highlight-none notranslate"><div class="highlight"><pre><span></span>[19]:
</pre></div>
</div>
<div class="output_area highlight-none notranslate"><div class="highlight"><pre>
<span></span>array([0.5, 1. ])
</pre></div>
</div>
</div>
</div>
<div class="section" id="Conclusion">
<h2>Conclusion<a class="headerlink" href="#Conclusion" title="Permalink to this headline"></a></h2>
<p>In this tutorial we saw an example of how to evaluate model performance using loss functions (during the forward pass). Crucially, we then saw how to calculate the parameter gradients (using <code class="docutils literal notranslate"><span class="pre">backward</span></code>) that would minimise this loss. You should now have a better understanding of when to apply different loss functions, especially for regression versus classification tasks.</p>
</div>
<div class="section" id="Recommended-Next-Steps">
<h2>Recommended Next Steps<a class="headerlink" href="#Recommended-Next-Steps" title="Permalink to this headline"></a></h2>
<p>In addition to loss functions, which are used for explicit optimization, you might want to look at metrics that give useful evaluation feedback even if they’re not explicitly optimized for in the same way as the loss. You might also want to learn more about the mechanics of the backpropagation stage in the autograd tutorial.</p>
</div>
</div>
<hr class="feedback-hr-top" />
<div class="feedback-container">
<div class="feedback-question">Did this page help you?</div>
<div class="feedback-answer-container">
<div class="feedback-answer yes-link" data-response="yes">Yes</div>
<div class="feedback-answer no-link" data-response="no">No</div>
</div>
<div class="feedback-thank-you">Thanks for your feedback!</div>
</div>
<hr class="feedback-hr-bottom" />
</div>
<div class="side-doc-outline">
<div class="side-doc-outline--content">
<div class="localtoc">
<p class="caption">
<span class="caption-text">Table Of Contents</span>
</p>
<ul>
<li><a class="reference internal" href="#">Loss functions</a><ul>
<li><a class="reference internal" href="#Basic-Usages">Basic Usages</a></li>
<li><a class="reference internal" href="#Loss-functions">Loss functions</a><ul>
<li><a class="reference internal" href="#L1-Loss">L1 Loss</a></li>
<li><a class="reference internal" href="#L2-Loss">L2 Loss</a></li>
<li><a class="reference internal" href="#Huber-Loss">Huber Loss</a></li>
<li><a class="reference internal" href="#Cross-Entropy-Loss-with-Sigmoid">Cross Entropy Loss with Sigmoid</a></li>
<li><a class="reference internal" href="#Cross-Entropy-Loss-with-Softmax">Cross Entropy Loss with Softmax</a></li>
<li><a class="reference internal" href="#Hinge-Loss">Hinge Loss</a></li>
<li><a class="reference internal" href="#Logistic-Loss">Logistic Loss</a></li>
<li><a class="reference internal" href="#Kullback-Leibler-Divergence-Loss">Kullback-Leibler Divergence Loss</a></li>
<li><a class="reference internal" href="#Triplet-Loss">Triplet Loss</a></li>
<li><a class="reference internal" href="#CTC-Loss">CTC Loss</a></li>
<li><a class="reference internal" href="#Cosine-Embedding-Loss">Cosine Embedding Loss</a></li>
<li><a class="reference internal" href="#PoissonNLLLoss">PoissonNLLLoss</a></li>
</ul>
</li>
<li><a class="reference internal" href="#Advanced:-Weighted-Loss">Advanced: Weighted Loss</a></li>
<li><a class="reference internal" href="#Conclusion">Conclusion</a></li>
<li><a class="reference internal" href="#Recommended-Next-Steps">Recommended Next Steps</a></li>
</ul>
</li>
</ul>
</div>
</div>
</div>
<div class="clearer"></div>
</div>
<footer class="site-footer h-card">
<div class="wrapper">
<div class="row">
<div class="col-4">
<h4 class="footer-category-title">Resources</h4>
<ul class="contact-list">
<li><a href="https://lists.apache.org/list.html?dev@mxnet.apache.org">Mailing list</a> <a class="u-email" href="mailto:dev-subscribe@mxnet.apache.org">(subscribe)</a></li>
<li><a href="https://discuss.mxnet.io">MXNet Discuss forum</a></li>
<li><a href="https://github.com/apache/mxnet/issues">Github Issues</a></li>
<li><a href="https://github.com/apache/mxnet/projects">Projects</a></li>
<li><a href="https://cwiki.apache.org/confluence/display/MXNET/Apache+MXNet+Home">Developer Wiki</a></li>
<li><a href="/community">Contribute To MXNet</a></li>
</ul>
</div>
<div class="col-4"><ul class="social-media-list"><li><a href="https://github.com/apache/mxnet"><svg class="svg-icon"><use xlink:href="../../../../_static/minima-social-icons.svg#github"></use></svg> <span class="username">apache/mxnet</span></a></li><li><a href="https://www.twitter.com/apachemxnet"><svg class="svg-icon"><use xlink:href="../../../../_static/minima-social-icons.svg#twitter"></use></svg> <span class="username">apachemxnet</span></a></li><li><a href="https://youtube.com/apachemxnet"><svg class="svg-icon"><use xlink:href="../../../../_static/minima-social-icons.svg#youtube"></use></svg> <span class="username">apachemxnet</span></a></li></ul>
</div>
<div class="col-4 footer-text">
<p>A flexible and efficient library for deep learning.</p>
</div>
</div>
</div>
</footer>
<footer class="site-footer2">
<div class="wrapper">
<div class="row">
<div class="col-3">
<img src="../../../../_static/apache_incubator_logo.png" class="footer-logo col-2">
</div>
<div class="footer-bottom-warning col-9">
<p>Apache MXNet is an effort undergoing incubation at <a href="http://www.apache.org/">The Apache Software Foundation</a> (ASF), <span style="font-weight:bold">sponsored by the <i>Apache Incubator</i></span>. Incubation is required
of all newly accepted projects until a further review indicates that the infrastructure,
communications, and decision making process have stabilized in a manner consistent with other
successful ASF projects. While incubation status is not necessarily a reflection of the completeness
or stability of the code, it does indicate that the project has yet to be fully endorsed by the ASF.
</p><p>"Copyright © 2017-2018, The Apache Software Foundation Apache MXNet, MXNet, Apache, the Apache
feather, and the Apache MXNet project logo are either registered trademarks or trademarks of the
Apache Software Foundation."</p>
</div>
</div>
</div>
</footer>
</body>
</html>