<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta charset="utf-8" />
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<style>
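/* Hover-activated dropdown menus used in the site header (the Apache links and the version picker). */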
.dropdown {
position: relative;
display: inline-block;
}
.dropdown-content {
display: none;
position: absolute;
background-color: #f9f9f9;
min-width: 160px;
box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2);
padding: 12px 16px;
z-index: 1;
text-align: left;
}
.dropdown:hover .dropdown-content {
display: block;
}
.dropdown-option:hover {
color: #FF4500;
}
.dropdown-option-active {
color: #FF4500;
font-weight: lighter;
}
.dropdown-option {
color: #000000;
font-weight: lighter;
}
.dropdown-header {
color: #FFFFFF;
display: inline-flex;
}
.dropdown-caret {
width: 18px;
}
.dropdown-caret-path {
fill: #FFFFFF;
}
</style>
<title>mxnet.module.base_module &#8212; Apache MXNet documentation</title>
<link rel="stylesheet" href="../../../_static/basic.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
<link rel="stylesheet" type="text/css" href="../../../_static/mxnet.css" />
<link rel="stylesheet" href="../../../_static/material-design-lite-1.3.0/material.blue-deep_orange.min.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/sphinx_materialdesign_theme.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/fontawesome/all.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/fonts.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/feedback.css" type="text/css" />
<script id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
<script src="../../../_static/jquery.js"></script>
<script src="../../../_static/underscore.js"></script>
<script src="../../../_static/doctools.js"></script>
<script src="../../../_static/language_data.js"></script>
<script src="../../../_static/matomo_analytics.js"></script>
<script src="../../../_static/autodoc.js"></script>
<script crossorigin="anonymous" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"></script>
<script async="async" src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/latest.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script type="text/x-mathjax-config">MathJax.Hub.Config({"tex2jax": {"inlineMath": [["$", "$"], ["\\(", "\\)"]], "processEscapes": true, "ignoreClass": "document", "processClass": "math|output_area"}})</script>
<link rel="shortcut icon" href="../../../_static/mxnet-icon.png"/>
<link rel="index" title="Index" href="../../../genindex.html" />
<link rel="search" title="Search" href="../../../search.html" />
</head>
<body><header class="site-header" role="banner">
<div class="wrapper">
<a class="site-title" rel="author" href="/versions/1.9.1/"><img
src="../../../_static/mxnet_logo.png" class="site-header-logo"></a>
<nav class="site-nav">
<input type="checkbox" id="nav-trigger" class="nav-trigger"/>
<label for="nav-trigger">
<span class="menu-icon">
<svg viewBox="0 0 18 15" width="18px" height="15px">
<path d="M18,1.484c0,0.82-0.665,1.484-1.484,1.484H1.484C0.665,2.969,0,2.304,0,1.484l0,0C0,0.665,0.665,0,1.484,0 h15.032C17.335,0,18,0.665,18,1.484L18,1.484z M18,7.516C18,8.335,17.335,9,16.516,9H1.484C0.665,9,0,8.335,0,7.516l0,0 c0-0.82,0.665-1.484,1.484-1.484h15.032C17.335,6.031,18,6.696,18,7.516L18,7.516z M18,13.516C18,14.335,17.335,15,16.516,15H1.484 C0.665,15,0,14.335,0,13.516l0,0c0-0.82,0.665-1.483,1.484-1.483h15.032C17.335,12.031,18,12.695,18,13.516L18,13.516z"/>
</svg>
</span>
</label>
<div class="trigger">
<a class="page-link" href="/versions/1.9.1/get_started">Get Started</a>
<a class="page-link" href="/versions/1.9.1/features">Features</a>
<a class="page-link" href="/versions/1.9.1/ecosystem">Ecosystem</a>
<a class="page-link page-current" href="/versions/1.9.1/api">Docs & Tutorials</a>
<a class="page-link" href="/versions/1.9.1/trusted_by">Trusted By</a>
<a class="page-link" href="https://github.com/apache/incubator-mxnet">GitHub</a>
<div class="dropdown" style="min-width:100px">
<span class="dropdown-header">Apache
<svg class="dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
</span>
<div class="dropdown-content" style="min-width:250px">
<a href="https://www.apache.org/foundation/">Apache Software Foundation</a>
<a href="https://incubator.apache.org/">Apache Incubator</a>
<a href="https://www.apache.org/licenses/">License</a>
<a href="/versions/1.9.1/api/faq/security.html">Security</a>
<a href="https://privacy.apache.org/policies/privacy-policy-public.html">Privacy</a>
<a href="https://www.apache.org/events/current-event">Events</a>
<a href="https://www.apache.org/foundation/sponsorship.html">Sponsorship</a>
<a href="https://www.apache.org/foundation/thanks.html">Thanks</a>
</div>
</div>
<div class="dropdown">
<span class="dropdown-header">1.9.1
<svg class="dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
</span>
<div class="dropdown-content">
<a class="dropdown-option" href="/">master</a><br>
<a class="dropdown-option-active" href="/versions/1.9.1/">1.9.1</a><br>
<a class="dropdown-option" href="/versions/1.8.0/">1.8.0</a><br>
<a class="dropdown-option" href="/versions/1.7.0/">1.7.0</a><br>
<a class="dropdown-option" href="/versions/1.6.0/">1.6.0</a><br>
<a class="dropdown-option" href="/versions/1.5.0/">1.5.0</a><br>
<a class="dropdown-option" href="/versions/1.4.1/">1.4.1</a><br>
<a class="dropdown-option" href="/versions/1.3.1/">1.3.1</a><br>
<a class="dropdown-option" href="/versions/1.2.1/">1.2.1</a><br>
<a class="dropdown-option" href="/versions/1.1.0/">1.1.0</a><br>
<a class="dropdown-option" href="/versions/1.0.0/">1.0.0</a><br>
<a class="dropdown-option" href="/versions/0.12.1/">0.12.1</a><br>
<a class="dropdown-option" href="/versions/0.11.0/">0.11.0</a>
</div>
</div>
</div>
</nav>
</div>
</header>
<div class="mdl-layout mdl-js-layout mdl-layout--fixed-header mdl-layout--fixed-drawer"><header class="mdl-layout__header mdl-layout__header--waterfall ">
<div class="mdl-layout__header-row">
<nav class="mdl-navigation breadcrumb">
<a class="mdl-navigation__link" href="../../index.html">Module code</a><i class="material-icons">navigate_next</i>
<a class="mdl-navigation__link is-active">mxnet.module.base_module</a>
</nav>
<div class="mdl-layout-spacer"></div>
<nav class="mdl-navigation">
<form class="form-inline pull-sm-right" action="../../../search.html" method="get">
<div class="mdl-textfield mdl-js-textfield mdl-textfield--expandable mdl-textfield--floating-label mdl-textfield--align-right">
<label id="quick-search-icon" class="mdl-button mdl-js-button mdl-button--icon" for="waterfall-exp">
<i class="material-icons">search</i>
</label>
<div class="mdl-textfield__expandable-holder">
<input class="mdl-textfield__input" type="text" name="q" id="waterfall-exp" placeholder="Search" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</div>
</div>
<div class="mdl-tooltip" data-mdl-for="quick-search-icon">
Quick search
</div>
</form>
</nav>
</div>
<div class="mdl-layout__header-row header-links">
<div class="mdl-layout-spacer"></div>
<nav class="mdl-navigation">
</nav>
</div>
</header><header class="mdl-layout__drawer">
<div class="globaltoc">
<span class="mdl-layout-title toc">Table Of Contents</span>
<nav class="mdl-navigation">
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../../tutorials/index.html">Python Tutorials</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/getting-started/index.html">Getting Started</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/index.html">Crash Course</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/1-ndarray.html">Manipulate data with <code class="docutils literal notranslate"><span class="pre">ndarray</span></code></a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/2-nn.html">Create a neural network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/3-autograd.html">Automatic differentiation with <code class="docutils literal notranslate"><span class="pre">autograd</span></code></a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/4-train.html">Train the neural network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/5-predict.html">Predict with a pre-trained model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/6-use_gpus.html">Use GPUs</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/to-mxnet/index.html">Moving to MXNet from Other Frameworks</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/to-mxnet/pytorch.html">PyTorch vs Apache MXNet</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/gluon_from_experiment_to_deployment.html">Gluon: from experiment to deployment</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/logistic_regression_explained.html">Logistic regression explained</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/image/mnist.html">MNIST</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/packages/index.html">Packages</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/autograd/index.html">Automatic Differentiation</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/gluon/index.html">Gluon</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/index.html">Blocks</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/custom-layer.html">Custom Layers</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/custom_layer_beginners.html">Customer Layers (Beginners)</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/hybridize.html">Hybridize</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/init.html">Initialization</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/naming.html">Parameter and Block Naming</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/nn.html">Layers and Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/parameters.html">Parameter Management</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/save_load_params.html">Saving and Loading Gluon Models</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/activations/activations.html">Activation Blocks</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/data/index.html">Data Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html">Image Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html#Spatial-Augmentation">Spatial Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html#Color-Augmentation">Color Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html#Composed-Augmentations">Composed Augmentations</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html">Gluon <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s and <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Using-own-data-with-included-Datasets">Using own data with included <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Using-own-data-with-custom-Datasets">Using own data with custom <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Appendix:-Upgrading-from-Module-DataIter-to-Gluon-DataLoader">Appendix: Upgrading from Module <code class="docutils literal notranslate"><span class="pre">DataIter</span></code> to Gluon <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/image/index.html">Image Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/image-augmentation.html">Image Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/info_gan.html">Image similarity search with InfoGAN</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/mnist.html">Handwritten Digit Recognition</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/pretrained_models.html">Using pre-trained models in MXNet</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/index.html">Losses</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/custom-loss.html">Custom Loss Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/kl_divergence.html">Kullback-Leibler (KL) Divergence</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/loss.html">Loss functions</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/text/index.html">Text Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/text/gnmt.html">Google Neural Machine Translation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/text/transformer.html">Machine Translation with Transformer</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/training/index.html">Training</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/fit_api_tutorial.html">MXNet Gluon Fit API</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/trainer.html">Trainer</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/index.html">Learning Rates</a><ul>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_finder.html">Learning Rate Finder</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_schedules.html">Learning Rate Schedules</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_schedules_advanced.html">Advanced Learning Rate Schedules</a></li>
</ul>
</li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/normalization/index.html">Normalization Blocks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/kvstore/index.html">KVStore</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/kvstore/kvstore.html">Distributed Key-Value Store</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/ndarray/index.html">NDArray</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/01-ndarray-intro.html">An Intro: Manipulate Data the MXNet Way with NDArray</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/02-ndarray-operations.html">NDArray Operations</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/03-ndarray-contexts.html">NDArray Contexts</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/gotchas_numpy_in_mxnet.html">Gotchas using NumPy in Apache MXNet</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/index.html">Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/csr.html">CSRNDArray - NDArray in Compressed Sparse Row Storage Format</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/row_sparse.html">RowSparseNDArray - NDArray for Sparse Gradient Updates</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/train.html">Train a Linear Regression Model with Sparse Symbols</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/train_gluon.html">Sparse NDArrays with Gluon</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/onnx/index.html">ONNX</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/fine_tuning_gluon.html">Fine-tuning an ONNX model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/inference_on_onnx_model.html">Running inference on MXNet/Gluon from an ONNX model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/super_resolution.html">Importing an ONNX model into MXNet</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/deploy/export/onnx.html">Export ONNX Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/optimizer/index.html">Optimizers</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/viz/index.html">Visualization</a><ul>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/visualize_graph">Visualize networks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/performance/index.html">Performance</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/performance/compression/index.html">Compression</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/compression/int8.html">Deploy with int-8</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/float16">Float16</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/gradient_compression">Gradient Compression</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/int8_inference.html">GluonCV with Quantized Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/performance/backend/index.html">Accelerated Backend Tools</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/index.html">Intel MKL-DNN</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/mkldnn_quantization.html">Quantize with MKL-DNN backend</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/mkldnn_quantization.html#Improving-accuracy-with-Intel®-Neural-Compressor">Improving accuracy with Intel® Neural Compressor</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/mkldnn_readme.html">Install MXNet with MKL-DNN</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/tensorrt/index.html">TensorRT</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/tensorrt/tensorrt.html">Optimizing Deep Learning Computation Graphs with TensorRT</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/tvm.html">Use TVM</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/profiler.html">Profiling MXNet Models</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/amp.html">Using AMP: Automatic Mixed Precision</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/deploy/index.html">Deployment</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/export/index.html">Export</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/export/onnx.html">Exporting to ONNX format</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/export_network.html">Export Gluon CV Models</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/blocks/save_load_params.html">Save / Load Parameters</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/inference/index.html">Inference</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/cpp.html">Deploy into C++</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/image_classification_jetson.html">Image Classication using pretrained ResNet-50 model on Jetson module</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/scala.html">Deploy into a Java or Scala Environment</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/wine_detector.html">Real-time Object Detection with MXNet On The Raspberry Pi</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/index.html">Run on AWS</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/use_ec2.html">Run on an EC2 Instance</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/use_sagemaker.html">Run on Amazon SageMaker</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/cloud.html">MXNet on the Cloud</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/extend/index.html">Extend</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/extend/custom_layer.html">Custom Layers</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/extend/customop.html">Custom Numpy Operators</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/new_op">New Operator Creation</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/add_op_in_backend">New Operator in MXNet Backend</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../../api/index.html">Python API</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../api/ndarray/index.html">mxnet.ndarray</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/ndarray.html">ndarray</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/contrib/index.html">ndarray.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/image/index.html">ndarray.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/linalg/index.html">ndarray.linalg</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/op/index.html">ndarray.op</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/random/index.html">ndarray.random</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/register/index.html">ndarray.register</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/sparse/index.html">ndarray.sparse</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/utils/index.html">ndarray.utils</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/gluon/index.html">mxnet.gluon</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/block.html">gluon.Block</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/hybrid_block.html">gluon.HybridBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/symbol_block.html">gluon.SymbolBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/constant.html">gluon.Constant</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/parameter.html">gluon.Parameter</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/parameter_dict.html">gluon.ParameterDict</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/trainer.html">gluon.Trainer</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/contrib/index.html">gluon.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/data/index.html">gluon.data</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/gluon/data/vision/index.html">data.vision</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/gluon/data/vision/datasets/index.html">vision.datasets</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/gluon/data/vision/transforms/index.html">vision.transforms</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/loss/index.html">gluon.loss</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/model_zoo/index.html">gluon.model_zoo.vision</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/nn/index.html">gluon.nn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/rnn/index.html">gluon.rnn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/utils/index.html">gluon.utils</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/autograd/index.html">mxnet.autograd</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/initializer/index.html">mxnet.initializer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/optimizer/index.html">mxnet.optimizer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/lr_scheduler/index.html">mxnet.lr_scheduler</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/metric/index.html">mxnet.metric</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore/index.html">mxnet.kvstore</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/symbol/index.html">mxnet.symbol</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/symbol.html">symbol</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/contrib/index.html">symbol.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/image/index.html">symbol.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/linalg/index.html">symbol.linalg</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/op/index.html">symbol.op</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/random/index.html">symbol.random</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/register/index.html">symbol.register</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/sparse/index.html">symbol.sparse</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/module/index.html">mxnet.module</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/contrib/index.html">mxnet.contrib</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/autograd/index.html">contrib.autograd</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/io/index.html">contrib.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/ndarray/index.html">contrib.ndarray</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/onnx/index.html">contrib.onnx</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/quantization/index.html">contrib.quantization</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/symbol/index.html">contrib.symbol</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/tensorboard/index.html">contrib.tensorboard</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/tensorrt/index.html">contrib.tensorrt</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/text/index.html">contrib.text</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/mxnet/index.html">mxnet</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/attribute/index.html">mxnet.attribute</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/base/index.html">mxnet.base</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/callback/index.html">mxnet.callback</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/context/index.html">mxnet.context</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/engine/index.html">mxnet.engine</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/executor/index.html">mxnet.executor</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/executor_manager/index.html">mxnet.executor_manager</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/image/index.html">mxnet.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/io/index.html">mxnet.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/kvstore_server/index.html">mxnet.kvstore_server</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/libinfo/index.html">mxnet.libinfo</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/log/index.html">mxnet.log</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/model/index.html">mxnet.model</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/monitor/index.html">mxnet.monitor</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/name/index.html">mxnet.name</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/notebook/index.html">mxnet.notebook</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/operator/index.html">mxnet.operator</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/profiler/index.html">mxnet.profiler</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/random/index.html">mxnet.random</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/recordio/index.html">mxnet.recordio</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/registry/index.html">mxnet.registry</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/rtc/index.html">mxnet.rtc</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/runtime/index.html">mxnet.runtime</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/test_utils/index.html">mxnet.test_utils</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/torch/index.html">mxnet.torch</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/util/index.html">mxnet.util</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/visualization/index.html">mxnet.visualization</a></li>
</ul>
</li>
</ul>
</li>
</ul>
</nav>
</div>
</header>
<main class="mdl-layout__content" tabIndex="0">
<script type="text/javascript" src="../../../_static/sphinx_materialdesign_theme.js "></script>
<script type="text/javascript" src="../../../_static/feedback.js"></script>
<header class="mdl-layout__drawer">
<div class="globaltoc">
<span class="mdl-layout-title toc">Table Of Contents</span>
<nav class="mdl-navigation">
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../../tutorials/index.html">Python Tutorials</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/getting-started/index.html">Getting Started</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/index.html">Crash Course</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/1-ndarray.html">Manipulate data with <code class="docutils literal notranslate"><span class="pre">ndarray</span></code></a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/2-nn.html">Create a neural network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/3-autograd.html">Automatic differentiation with <code class="docutils literal notranslate"><span class="pre">autograd</span></code></a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/4-train.html">Train the neural network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/5-predict.html">Predict with a pre-trained model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/6-use_gpus.html">Use GPUs</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/to-mxnet/index.html">Moving to MXNet from Other Frameworks</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/to-mxnet/pytorch.html">PyTorch vs Apache MXNet</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/gluon_from_experiment_to_deployment.html">Gluon: from experiment to deployment</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/logistic_regression_explained.html">Logistic regression explained</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/image/mnist.html">MNIST</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/packages/index.html">Packages</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/autograd/index.html">Automatic Differentiation</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/gluon/index.html">Gluon</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/index.html">Blocks</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/custom-layer.html">Custom Layers</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/custom_layer_beginners.html">Customer Layers (Beginners)</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/hybridize.html">Hybridize</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/init.html">Initialization</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/naming.html">Parameter and Block Naming</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/nn.html">Layers and Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/parameters.html">Parameter Management</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/save_load_params.html">Saving and Loading Gluon Models</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/activations/activations.html">Activation Blocks</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/data/index.html">Data Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html">Image Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html#Spatial-Augmentation">Spatial Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html#Color-Augmentation">Color Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html#Composed-Augmentations">Composed Augmentations</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html">Gluon <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s and <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Using-own-data-with-included-Datasets">Using own data with included <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Using-own-data-with-custom-Datasets">Using own data with custom <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Appendix:-Upgrading-from-Module-DataIter-to-Gluon-DataLoader">Appendix: Upgrading from Module <code class="docutils literal notranslate"><span class="pre">DataIter</span></code> to Gluon <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/image/index.html">Image Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/image-augmentation.html">Image Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/info_gan.html">Image similarity search with InfoGAN</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/mnist.html">Handwritten Digit Recognition</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/pretrained_models.html">Using pre-trained models in MXNet</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/index.html">Losses</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/custom-loss.html">Custom Loss Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/kl_divergence.html">Kullback-Leibler (KL) Divergence</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/loss.html">Loss functions</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/text/index.html">Text Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/text/gnmt.html">Google Neural Machine Translation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/text/transformer.html">Machine Translation with Transformer</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/training/index.html">Training</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/fit_api_tutorial.html">MXNet Gluon Fit API</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/trainer.html">Trainer</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/index.html">Learning Rates</a><ul>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_finder.html">Learning Rate Finder</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_schedules.html">Learning Rate Schedules</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_schedules_advanced.html">Advanced Learning Rate Schedules</a></li>
</ul>
</li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/normalization/index.html">Normalization Blocks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/kvstore/index.html">KVStore</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/kvstore/kvstore.html">Distributed Key-Value Store</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/ndarray/index.html">NDArray</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/01-ndarray-intro.html">An Intro: Manipulate Data the MXNet Way with NDArray</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/02-ndarray-operations.html">NDArray Operations</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/03-ndarray-contexts.html">NDArray Contexts</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/gotchas_numpy_in_mxnet.html">Gotchas using NumPy in Apache MXNet</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/index.html">Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/csr.html">CSRNDArray - NDArray in Compressed Sparse Row Storage Format</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/row_sparse.html">RowSparseNDArray - NDArray for Sparse Gradient Updates</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/train.html">Train a Linear Regression Model with Sparse Symbols</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/train_gluon.html">Sparse NDArrays with Gluon</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/onnx/index.html">ONNX</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/fine_tuning_gluon.html">Fine-tuning an ONNX model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/inference_on_onnx_model.html">Running inference on MXNet/Gluon from an ONNX model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/super_resolution.html">Importing an ONNX model into MXNet</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/deploy/export/onnx.html">Export ONNX Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/optimizer/index.html">Optimizers</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/viz/index.html">Visualization</a><ul>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/visualize_graph">Visualize networks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/performance/index.html">Performance</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/performance/compression/index.html">Compression</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/compression/int8.html">Deploy with int-8</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/float16">Float16</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/gradient_compression">Gradient Compression</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/int8_inference.html">GluonCV with Quantized Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/performance/backend/index.html">Accelerated Backend Tools</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/index.html">Intel MKL-DNN</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/mkldnn_quantization.html">Quantize with MKL-DNN backend</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/mkldnn_quantization.html#Improving-accuracy-with-Intel®-Neural-Compressor">Improving accuracy with Intel® Neural Compressor</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/mkldnn_readme.html">Install MXNet with MKL-DNN</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/tensorrt/index.html">TensorRT</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/tensorrt/tensorrt.html">Optimizing Deep Learning Computation Graphs with TensorRT</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/tvm.html">Use TVM</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/profiler.html">Profiling MXNet Models</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/amp.html">Using AMP: Automatic Mixed Precision</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/deploy/index.html">Deployment</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/export/index.html">Export</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/export/onnx.html">Exporting to ONNX format</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/export_network.html">Export Gluon CV Models</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/blocks/save_load_params.html">Save / Load Parameters</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/inference/index.html">Inference</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/cpp.html">Deploy into C++</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/image_classification_jetson.html">Image Classication using pretrained ResNet-50 model on Jetson module</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/scala.html">Deploy into a Java or Scala Environment</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/wine_detector.html">Real-time Object Detection with MXNet On The Raspberry Pi</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/index.html">Run on AWS</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/use_ec2.html">Run on an EC2 Instance</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/use_sagemaker.html">Run on Amazon SageMaker</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/cloud.html">MXNet on the Cloud</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/extend/index.html">Extend</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/extend/custom_layer.html">Custom Layers</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/extend/customop.html">Custom Numpy Operators</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/new_op">New Operator Creation</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/add_op_in_backend">New Operator in MXNet Backend</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../../api/index.html">Python API</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../api/ndarray/index.html">mxnet.ndarray</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/ndarray.html">ndarray</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/contrib/index.html">ndarray.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/image/index.html">ndarray.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/linalg/index.html">ndarray.linalg</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/op/index.html">ndarray.op</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/random/index.html">ndarray.random</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/register/index.html">ndarray.register</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/sparse/index.html">ndarray.sparse</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/utils/index.html">ndarray.utils</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/gluon/index.html">mxnet.gluon</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/block.html">gluon.Block</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/hybrid_block.html">gluon.HybridBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/symbol_block.html">gluon.SymbolBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/constant.html">gluon.Constant</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/parameter.html">gluon.Parameter</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/parameter_dict.html">gluon.ParameterDict</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/trainer.html">gluon.Trainer</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/contrib/index.html">gluon.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/data/index.html">gluon.data</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/gluon/data/vision/index.html">data.vision</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/gluon/data/vision/datasets/index.html">vision.datasets</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/gluon/data/vision/transforms/index.html">vision.transforms</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/loss/index.html">gluon.loss</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/model_zoo/index.html">gluon.model_zoo.vision</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/nn/index.html">gluon.nn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/rnn/index.html">gluon.rnn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/utils/index.html">gluon.utils</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/autograd/index.html">mxnet.autograd</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/initializer/index.html">mxnet.initializer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/optimizer/index.html">mxnet.optimizer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/lr_scheduler/index.html">mxnet.lr_scheduler</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/metric/index.html">mxnet.metric</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore/index.html">mxnet.kvstore</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/symbol/index.html">mxnet.symbol</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/symbol.html">symbol</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/contrib/index.html">symbol.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/image/index.html">symbol.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/linalg/index.html">symbol.linalg</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/op/index.html">symbol.op</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/random/index.html">symbol.random</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/register/index.html">symbol.register</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/sparse/index.html">symbol.sparse</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/module/index.html">mxnet.module</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/contrib/index.html">mxnet.contrib</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/autograd/index.html">contrib.autograd</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/io/index.html">contrib.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/ndarray/index.html">contrib.ndarray</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/onnx/index.html">contrib.onnx</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/quantization/index.html">contrib.quantization</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/symbol/index.html">contrib.symbol</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/tensorboard/index.html">contrib.tensorboard</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/tensorrt/index.html">contrib.tensorrt</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/text/index.html">contrib.text</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/mxnet/index.html">mxnet</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/attribute/index.html">mxnet.attribute</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/base/index.html">mxnet.base</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/callback/index.html">mxnet.callback</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/context/index.html">mxnet.context</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/engine/index.html">mxnet.engine</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/executor/index.html">mxnet.executor</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/executor_manager/index.html">mxnet.executor_manager</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/image/index.html">mxnet.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/io/index.html">mxnet.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/kvstore_server/index.html">mxnet.kvstore_server</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/libinfo/index.html">mxnet.libinfo</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/log/index.html">mxnet.log</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/model/index.html">mxnet.model</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/monitor/index.html">mxnet.monitor</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/name/index.html">mxnet.name</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/notebook/index.html">mxnet.notebook</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/operator/index.html">mxnet.operator</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/profiler/index.html">mxnet.profiler</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/random/index.html">mxnet.random</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/recordio/index.html">mxnet.recordio</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/registry/index.html">mxnet.registry</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/rtc/index.html">mxnet.rtc</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/runtime/index.html">mxnet.runtime</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/test_utils/index.html">mxnet.test_utils</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/torch/index.html">mxnet.torch</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/util/index.html">mxnet.util</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/visualization/index.html">mxnet.visualization</a></li>
</ul>
</li>
</ul>
</li>
</ul>
</nav>
</div>
</header>
<div class="document">
<div class="page-content" role="main">
<h1>Source code for mxnet.module.base_module</h1><div class="highlight"><pre>
<span></span><span class="c1"># Licensed to the Apache Software Foundation (ASF) under one</span>
<span class="c1"># or more contributor license agreements. See the NOTICE file</span>
<span class="c1"># distributed with this work for additional information</span>
<span class="c1"># regarding copyright ownership. The ASF licenses this file</span>
<span class="c1"># to you under the Apache License, Version 2.0 (the</span>
<span class="c1"># &quot;License&quot;); you may not use this file except in compliance</span>
<span class="c1"># with the License. You may obtain a copy of the License at</span>
<span class="c1">#</span>
<span class="c1"># http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="c1">#</span>
<span class="c1"># Unless required by applicable law or agreed to in writing,</span>
<span class="c1"># software distributed under the License is distributed on an</span>
<span class="c1"># &quot;AS IS&quot; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY</span>
<span class="c1"># KIND, either express or implied. See the License for the</span>
<span class="c1"># specific language governing permissions and limitations</span>
<span class="c1"># under the License.</span>
<span class="c1"># pylint: disable=fixme, too-many-arguments, too-many-locals, no-else-raise</span>
<span class="c1"># pylint: disable=too-many-public-methods, too-many-branches, too-many-lines</span>
<span class="sd">&quot;&quot;&quot;`BaseModule` defines an API for modules.&quot;&quot;&quot;</span>
<span class="kn">import</span> <span class="nn">time</span>
<span class="kn">import</span> <span class="nn">logging</span>
<span class="kn">import</span> <span class="nn">warnings</span>
<span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="kn">from</span> <span class="nn">..</span> <span class="kn">import</span> <span class="n">metric</span>
<span class="kn">from</span> <span class="nn">..</span> <span class="kn">import</span> <span class="n">ndarray</span>
<span class="kn">from</span> <span class="nn">..context</span> <span class="kn">import</span> <span class="n">cpu</span>
<span class="kn">from</span> <span class="nn">..model</span> <span class="kn">import</span> <span class="n">BatchEndParam</span>
<span class="kn">from</span> <span class="nn">..initializer</span> <span class="kn">import</span> <span class="n">Uniform</span>
<span class="kn">from</span> <span class="nn">..io</span> <span class="kn">import</span> <span class="n">DataDesc</span><span class="p">,</span> <span class="n">DataIter</span><span class="p">,</span> <span class="n">DataBatch</span>
<span class="kn">from</span> <span class="nn">..base</span> <span class="kn">import</span> <span class="n">_as_list</span>
<span class="k">def</span> <span class="nf">_check_input_names</span><span class="p">(</span><span class="n">symbol</span><span class="p">,</span> <span class="n">names</span><span class="p">,</span> <span class="n">typename</span><span class="p">,</span> <span class="n">throw</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Check that all input names are in symbol&#39;s arguments.&quot;&quot;&quot;</span>
<span class="n">args</span> <span class="o">=</span> <span class="n">symbol</span><span class="o">.</span><span class="n">list_arguments</span><span class="p">()</span>
<span class="k">for</span> <span class="n">name</span> <span class="ow">in</span> <span class="n">names</span><span class="p">:</span>
<span class="k">if</span> <span class="n">name</span> <span class="ow">in</span> <span class="n">args</span><span class="p">:</span>
<span class="k">continue</span>
<span class="n">candidates</span> <span class="o">=</span> <span class="p">[</span><span class="n">arg</span> <span class="k">for</span> <span class="n">arg</span> <span class="ow">in</span> <span class="n">args</span> <span class="k">if</span>
<span class="ow">not</span> <span class="n">arg</span><span class="o">.</span><span class="n">endswith</span><span class="p">(</span><span class="s1">&#39;_weight&#39;</span><span class="p">)</span> <span class="ow">and</span>
<span class="ow">not</span> <span class="n">arg</span><span class="o">.</span><span class="n">endswith</span><span class="p">(</span><span class="s1">&#39;_bias&#39;</span><span class="p">)</span> <span class="ow">and</span>
<span class="ow">not</span> <span class="n">arg</span><span class="o">.</span><span class="n">endswith</span><span class="p">(</span><span class="s1">&#39;_gamma&#39;</span><span class="p">)</span> <span class="ow">and</span>
<span class="ow">not</span> <span class="n">arg</span><span class="o">.</span><span class="n">endswith</span><span class="p">(</span><span class="s1">&#39;_beta&#39;</span><span class="p">)]</span>
<span class="n">msg</span> <span class="o">=</span> <span class="s2">&quot;</span><span class="se">\033</span><span class="s2">[91mYou created Module with Module(..., </span><span class="si">%s</span><span class="s2">_names=</span><span class="si">%s</span><span class="s2">) but &quot;</span> \
<span class="s2">&quot;input with name &#39;</span><span class="si">%s</span><span class="s2">&#39; is not found in symbol.list_arguments(). &quot;</span> \
<span class="s2">&quot;Did you mean one of:</span><span class="se">\n\t</span><span class="si">%s</span><span class="se">\033</span><span class="s2">[0m&quot;</span><span class="o">%</span><span class="p">(</span>
<span class="n">typename</span><span class="p">,</span> <span class="nb">str</span><span class="p">(</span><span class="n">names</span><span class="p">),</span> <span class="n">name</span><span class="p">,</span> <span class="s1">&#39;</span><span class="se">\n\t</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">candidates</span><span class="p">))</span>
<span class="k">if</span> <span class="n">throw</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="n">msg</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="n">msg</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_check_names_match</span><span class="p">(</span><span class="n">data_names</span><span class="p">,</span> <span class="n">data_shapes</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="n">throw</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Check that input names matches input data descriptors.&quot;&quot;&quot;</span>
<span class="n">actual</span> <span class="o">=</span> <span class="p">[</span><span class="n">x</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="k">for</span> <span class="n">x</span> <span class="ow">in</span> <span class="n">data_shapes</span><span class="p">]</span>
<span class="k">if</span> <span class="nb">sorted</span><span class="p">(</span><span class="n">data_names</span><span class="p">)</span> <span class="o">!=</span> <span class="nb">sorted</span><span class="p">(</span><span class="n">actual</span><span class="p">):</span>
<span class="n">msg</span> <span class="o">=</span> <span class="s2">&quot;Data provided by </span><span class="si">%s</span><span class="s2">_shapes don&#39;t match names specified by </span><span class="si">%s</span><span class="s2">_names (</span><span class="si">%s</span><span class="s2"> vs. </span><span class="si">%s</span><span class="s2">)&quot;</span><span class="o">%</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="nb">str</span><span class="p">(</span><span class="n">data_shapes</span><span class="p">),</span> <span class="nb">str</span><span class="p">(</span><span class="n">data_names</span><span class="p">))</span>
<span class="k">if</span> <span class="n">throw</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="n">msg</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="n">msg</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_parse_data_desc</span><span class="p">(</span><span class="n">data_names</span><span class="p">,</span> <span class="n">label_names</span><span class="p">,</span> <span class="n">data_shapes</span><span class="p">,</span> <span class="n">label_shapes</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;parse data_attrs into DataDesc format and check that names match&quot;&quot;&quot;</span>
<span class="n">data_shapes</span> <span class="o">=</span> <span class="p">[</span><span class="n">x</span> <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">DataDesc</span><span class="p">)</span> <span class="k">else</span> <span class="n">DataDesc</span><span class="p">(</span><span class="o">*</span><span class="n">x</span><span class="p">)</span> <span class="k">for</span> <span class="n">x</span> <span class="ow">in</span> <span class="n">data_shapes</span><span class="p">]</span>
<span class="n">_check_names_match</span><span class="p">(</span><span class="n">data_names</span><span class="p">,</span> <span class="n">data_shapes</span><span class="p">,</span> <span class="s1">&#39;data&#39;</span><span class="p">,</span> <span class="kc">True</span><span class="p">)</span>
<span class="k">if</span> <span class="n">label_shapes</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">label_shapes</span> <span class="o">=</span> <span class="p">[</span><span class="n">x</span> <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">x</span><span class="p">,</span> <span class="n">DataDesc</span><span class="p">)</span> <span class="k">else</span> <span class="n">DataDesc</span><span class="p">(</span><span class="o">*</span><span class="n">x</span><span class="p">)</span> <span class="k">for</span> <span class="n">x</span> <span class="ow">in</span> <span class="n">label_shapes</span><span class="p">]</span>
<span class="n">_check_names_match</span><span class="p">(</span><span class="n">label_names</span><span class="p">,</span> <span class="n">label_shapes</span><span class="p">,</span> <span class="s1">&#39;label&#39;</span><span class="p">,</span> <span class="kc">False</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">_check_names_match</span><span class="p">(</span><span class="n">label_names</span><span class="p">,</span> <span class="p">[],</span> <span class="s1">&#39;label&#39;</span><span class="p">,</span> <span class="kc">False</span><span class="p">)</span>
<span class="k">return</span> <span class="n">data_shapes</span><span class="p">,</span> <span class="n">label_shapes</span>
<div class="viewcode-block" id="BaseModule"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule">[docs]</a><span class="k">class</span> <span class="nc">BaseModule</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;The base class of a module.</span>
<span class="sd"> A module represents a computation component. One can think of module as a computation machine.</span>
<span class="sd"> A module can execute forward and backward passes and update parameters in a model.</span>
<span class="sd"> We aim to make the APIs easy to use, especially in the case when we need to use the imperative</span>
<span class="sd"> API to work with multiple modules (e.g. stochastic depth network).</span>
<span class="sd"> A module has several states:</span>
<span class="sd"> - Initial state: Memory is not allocated yet, so the module is not ready for computation yet.</span>
<span class="sd"> - Binded: Shapes for inputs, outputs, and parameters are all known, memory has been allocated,</span>
<span class="sd"> and the module is ready for computation.</span>
<span class="sd"> - Parameters are initialized: For modules with parameters, doing computation before</span>
<span class="sd"> initializing the parameters might result in undefined outputs.</span>
<span class="sd"> - Optimizer is installed: An optimizer can be installed to a module. After this, the parameters</span>
<span class="sd"> of the module can be updated according to the optimizer after gradients are computed</span>
<span class="sd"> (forward-backward).</span>
<span class="sd"> In order for a module to interact with others, it must be able to report the</span>
<span class="sd"> following information in its initial state (before binding):</span>
<span class="sd"> - `data_names`: list of type string indicating the names of the required input data.</span>
<span class="sd"> - `output_names`: list of type string indicating the names of the required outputs.</span>
<span class="sd"> After binding, a module should be able to report the following richer information:</span>
<span class="sd"> - state information</span>
<span class="sd"> - `binded`: `bool`, indicates whether the memory buffers needed for computation</span>
<span class="sd"> have been allocated.</span>
<span class="sd"> - `for_training`: whether the module is bound for training.</span>
<span class="sd"> - `params_initialized`: `bool`, indicates whether the parameters of this module</span>
<span class="sd"> have been initialized.</span>
<span class="sd"> - `optimizer_initialized`: `bool`, indicates whether an optimizer is defined</span>
<span class="sd"> and initialized.</span>
<span class="sd"> - `inputs_need_grad`: `bool`, indicates whether gradients with respect to the</span>
<span class="sd"> input data are needed. Might be useful when implementing composition of modules.</span>
<span class="sd"> - input/output information</span>
<span class="sd"> - `data_shapes`: a list of `(name, shape)`. In theory, since the memory is allocated,</span>
<span class="sd"> we could directly provide the data arrays. But in the case of data parallelism,</span>
<span class="sd"> the data arrays might not be of the same shape as viewed from the external world.</span>
<span class="sd"> - `label_shapes`: a list of `(name, shape)`. This might be `[]` if the module does</span>
<span class="sd"> not need labels (e.g. it does not contains a loss function at the top), or a module</span>
<span class="sd"> is not bound for training.</span>
<span class="sd"> - `output_shapes`: a list of `(name, shape)` for outputs of the module.</span>
<span class="sd"> - parameters (for modules with parameters)</span>
<span class="sd"> - `get_params()`: return a tuple `(arg_params, aux_params)`. Each of those</span>
<span class="sd"> is a dictionary of name to ``NDArray`` mapping. Those `NDArray` always lives on</span>
<span class="sd"> CPU. The actual parameters used for computing might live on other devices (GPUs),</span>
<span class="sd"> this function will retrieve (a copy of) the latest parameters.</span>
<span class="sd"> - ``set_params(arg_params, aux_params)``: assign parameters to the devices</span>
<span class="sd"> doing the computation.</span>
<span class="sd"> - ``init_params(...)``: a more flexible interface to assign or initialize the parameters.</span>
<span class="sd"> - setup</span>
<span class="sd"> - `bind()`: prepare environment for computation.</span>
<span class="sd"> - `init_optimizer()`: install optimizer for parameter updating.</span>
<span class="sd"> - `prepare()`: prepare the module based on the current data batch.</span>
<span class="sd"> - computation</span>
<span class="sd"> - `forward(data_batch)`: forward operation.</span>
<span class="sd"> - `backward(out_grads=None)`: backward operation.</span>
<span class="sd"> - `update()`: update parameters according to installed optimizer.</span>
<span class="sd"> - `get_outputs()`: get outputs of the previous forward operation.</span>
<span class="sd"> - `get_input_grads()`: get the gradients with respect to the inputs computed</span>
<span class="sd"> in the previous backward operation.</span>
<span class="sd"> - `update_metric(metric, labels, pre_sliced=False)`: update performance metric</span>
<span class="sd"> for the previous forward</span>
<span class="sd"> computed results.</span>
<span class="sd"> - other properties (mostly for backward compatibility)</span>
<span class="sd"> - `symbol`: the underlying symbolic graph for this module (if any)</span>
<span class="sd"> This property is not necessarily constant. For example, for `BucketingModule`,</span>
<span class="sd"> this property is simply the *current* symbol being used. For other modules,</span>
<span class="sd"> this value might not be well defined.</span>
<span class="sd"> When those intermediate-level API are implemented properly, the following</span>
<span class="sd"> high-level API will be automatically available for a module:</span>
<span class="sd"> - `fit`: train the module parameters on a data set.</span>
<span class="sd"> - `predict`: run prediction on a data set and collect outputs.</span>
<span class="sd"> - `score`: run prediction on a data set and evaluate performance.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of creating a mxnet module.</span>
<span class="sd"> &gt;&gt;&gt; import mxnet as mx</span>
<span class="sd"> &gt;&gt;&gt; data = mx.symbol.Variable(&#39;data&#39;)</span>
<span class="sd"> &gt;&gt;&gt; fc1 = mx.symbol.FullyConnected(data, name=&#39;fc1&#39;, num_hidden=128)</span>
<span class="sd"> &gt;&gt;&gt; act1 = mx.symbol.Activation(fc1, name=&#39;relu1&#39;, act_type=&quot;relu&quot;)</span>
<span class="sd"> &gt;&gt;&gt; fc2 = mx.symbol.FullyConnected(act1, name = &#39;fc2&#39;, num_hidden = 64)</span>
<span class="sd"> &gt;&gt;&gt; act2 = mx.symbol.Activation(fc2, name=&#39;relu2&#39;, act_type=&quot;relu&quot;)</span>
<span class="sd"> &gt;&gt;&gt; fc3 = mx.symbol.FullyConnected(act2, name=&#39;fc3&#39;, num_hidden=10)</span>
<span class="sd"> &gt;&gt;&gt; out = mx.symbol.SoftmaxOutput(fc3, name = &#39;softmax&#39;)</span>
<span class="sd"> &gt;&gt;&gt; mod = mx.mod.Module(out)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">def</span> <span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">logger</span><span class="o">=</span><span class="n">logging</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">logger</span> <span class="o">=</span> <span class="n">logger</span>
<span class="bp">self</span><span class="o">.</span><span class="n">binded</span> <span class="o">=</span> <span class="kc">False</span>
<span class="bp">self</span><span class="o">.</span><span class="n">for_training</span> <span class="o">=</span> <span class="kc">False</span>
<span class="bp">self</span><span class="o">.</span><span class="n">inputs_need_grad</span> <span class="o">=</span> <span class="kc">False</span>
<span class="bp">self</span><span class="o">.</span><span class="n">params_initialized</span> <span class="o">=</span> <span class="kc">False</span>
<span class="bp">self</span><span class="o">.</span><span class="n">optimizer_initialized</span> <span class="o">=</span> <span class="kc">False</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_symbol</span> <span class="o">=</span> <span class="kc">None</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_total_exec_bytes</span> <span class="o">=</span> <span class="mi">0</span>
<span class="c1">################################################################################</span>
<span class="c1"># High Level API</span>
<span class="c1">################################################################################</span>
<div class="viewcode-block" id="BaseModule.forward_backward"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.forward_backward">[docs]</a> <span class="k">def</span> <span class="nf">forward_backward</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">data_batch</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;A convenient function that calls both ``forward`` and ``backward``.&quot;&quot;&quot;</span>
<span class="bp">self</span><span class="o">.</span><span class="n">forward</span><span class="p">(</span><span class="n">data_batch</span><span class="p">,</span> <span class="n">is_train</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">backward</span><span class="p">()</span></div>
<div class="viewcode-block" id="BaseModule.score"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.score">[docs]</a> <span class="k">def</span> <span class="nf">score</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">eval_data</span><span class="p">,</span> <span class="n">eval_metric</span><span class="p">,</span> <span class="n">num_batch</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">batch_end_callback</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">score_end_callback</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">reset</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">epoch</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">sparse_row_id_fn</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Runs prediction on ``eval_data`` and evaluates the performance according to</span>
<span class="sd"> the given ``eval_metric``.</span>
<span class="sd"> Checkout `Module Tutorial &lt;https://mxnet.apache.org/api/python/tutorials/packages/module/index.html&gt;`_</span>
<span class="sd"> to see an end-to-end use-case.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> eval_data : DataIter</span>
<span class="sd"> Evaluation data to run prediction on.</span>
<span class="sd"> eval_metric : EvalMetric or list of EvalMetrics</span>
<span class="sd"> Evaluation metric to use.</span>
<span class="sd"> num_batch : int</span>
<span class="sd"> Number of batches to run. Defaults to ``None``, indicating run until the `DataIter`</span>
<span class="sd"> finishes.</span>
<span class="sd"> batch_end_callback : function</span>
<span class="sd"> Could also be a list of functions.</span>
<span class="sd"> reset : bool</span>
<span class="sd"> Defaults to ``True``. Indicates whether we should reset `eval_data` before starting</span>
<span class="sd"> evaluating.</span>
<span class="sd"> epoch : int</span>
<span class="sd"> Defaults to 0. For compatibility, this will be passed to callbacks (if any).</span>
<span class="sd"> During training, this will correspond to the training epoch number.</span>
<span class="sd"> sparse_row_id_fn : A callback function</span>
<span class="sd"> The function takes `data_batch` as an input and returns a dict of</span>
<span class="sd"> str -&gt; NDArray. The resulting dict is used for pulling row_sparse</span>
<span class="sd"> parameters from the kvstore, where the str key is the name of the param,</span>
<span class="sd"> and the value is the row id of the param to pull.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of using score for prediction.</span>
<span class="sd"> &gt;&gt;&gt; # Evaluate accuracy on val_dataiter</span>
<span class="sd"> &gt;&gt;&gt; metric = mx.metric.Accuracy()</span>
<span class="sd"> &gt;&gt;&gt; mod.score(val_dataiter, metric)</span>
<span class="sd"> &gt;&gt;&gt; mod.score(val_dataiter, [&#39;mse&#39;, &#39;acc&#39;])</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">assert</span> <span class="bp">self</span><span class="o">.</span><span class="n">binded</span> <span class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span class="n">params_initialized</span>
<span class="k">if</span> <span class="n">reset</span><span class="p">:</span>
<span class="n">eval_data</span><span class="o">.</span><span class="n">reset</span><span class="p">()</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">eval_metric</span><span class="p">,</span> <span class="n">metric</span><span class="o">.</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="n">eval_metric</span> <span class="o">=</span> <span class="n">metric</span><span class="o">.</span><span class="n">create</span><span class="p">(</span><span class="n">eval_metric</span><span class="p">)</span>
<span class="n">eval_metric</span><span class="o">.</span><span class="n">reset</span><span class="p">()</span>
<span class="n">actual_num_batch</span> <span class="o">=</span> <span class="mi">0</span>
<span class="k">for</span> <span class="n">nbatch</span><span class="p">,</span> <span class="n">eval_batch</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">eval_data</span><span class="p">):</span>
<span class="k">if</span> <span class="n">num_batch</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span> <span class="ow">and</span> <span class="n">nbatch</span> <span class="o">==</span> <span class="n">num_batch</span><span class="p">:</span>
<span class="k">break</span>
<span class="bp">self</span><span class="o">.</span><span class="n">prepare</span><span class="p">(</span><span class="n">eval_batch</span><span class="p">,</span> <span class="n">sparse_row_id_fn</span><span class="o">=</span><span class="n">sparse_row_id_fn</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">forward</span><span class="p">(</span><span class="n">eval_batch</span><span class="p">,</span> <span class="n">is_train</span><span class="o">=</span><span class="kc">False</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">eval_batch</span><span class="p">,</span> <span class="nb">list</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">update_metric</span><span class="p">(</span><span class="n">eval_metric</span><span class="p">,</span> <span class="p">[</span><span class="n">eb</span><span class="o">.</span><span class="n">label</span> <span class="k">for</span> <span class="n">eb</span> <span class="ow">in</span> <span class="n">eval_batch</span><span class="p">],</span> <span class="n">pre_sliced</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">update_metric</span><span class="p">(</span><span class="n">eval_metric</span><span class="p">,</span> <span class="n">eval_batch</span><span class="o">.</span><span class="n">label</span><span class="p">)</span>
<span class="k">if</span> <span class="n">batch_end_callback</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">batch_end_params</span> <span class="o">=</span> <span class="n">BatchEndParam</span><span class="p">(</span><span class="n">epoch</span><span class="o">=</span><span class="n">epoch</span><span class="p">,</span>
<span class="n">nbatch</span><span class="o">=</span><span class="n">nbatch</span><span class="p">,</span>
<span class="n">eval_metric</span><span class="o">=</span><span class="n">eval_metric</span><span class="p">,</span>
<span class="nb">locals</span><span class="o">=</span><span class="nb">locals</span><span class="p">())</span>
<span class="k">for</span> <span class="n">callback</span> <span class="ow">in</span> <span class="n">_as_list</span><span class="p">(</span><span class="n">batch_end_callback</span><span class="p">):</span>
<span class="n">callback</span><span class="p">(</span><span class="n">batch_end_params</span><span class="p">)</span>
<span class="n">actual_num_batch</span> <span class="o">+=</span> <span class="mi">1</span>
<span class="k">if</span> <span class="n">score_end_callback</span><span class="p">:</span>
<span class="n">params</span> <span class="o">=</span> <span class="n">BatchEndParam</span><span class="p">(</span><span class="n">epoch</span><span class="o">=</span><span class="n">epoch</span><span class="p">,</span>
<span class="n">nbatch</span><span class="o">=</span><span class="n">actual_num_batch</span><span class="p">,</span>
<span class="n">eval_metric</span><span class="o">=</span><span class="n">eval_metric</span><span class="p">,</span>
<span class="nb">locals</span><span class="o">=</span><span class="nb">locals</span><span class="p">())</span>
<span class="k">for</span> <span class="n">callback</span> <span class="ow">in</span> <span class="n">_as_list</span><span class="p">(</span><span class="n">score_end_callback</span><span class="p">):</span>
<span class="n">callback</span><span class="p">(</span><span class="n">params</span><span class="p">)</span>
<span class="k">return</span> <span class="n">eval_metric</span><span class="o">.</span><span class="n">get_name_value</span><span class="p">()</span></div>
<div class="viewcode-block" id="BaseModule.iter_predict"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.iter_predict">[docs]</a> <span class="k">def</span> <span class="nf">iter_predict</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">eval_data</span><span class="p">,</span> <span class="n">num_batch</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">reset</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">sparse_row_id_fn</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Iterates over predictions.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; for pred, i_batch, batch in module.iter_predict(eval_data):</span>
<span class="sd"> ... # pred is a list of outputs from the module</span>
<span class="sd"> ... # i_batch is a integer</span>
<span class="sd"> ... # batch is the data batch from the data iterator</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> eval_data : DataIter</span>
<span class="sd"> Evaluation data to run prediction on.</span>
<span class="sd"> num_batch : int</span>
<span class="sd"> Default is ``None``, indicating running all the batches in the data iterator.</span>
<span class="sd"> reset : bool</span>
<span class="sd"> Default is ``True``, indicating whether we should reset the data iter before start</span>
<span class="sd"> doing prediction.</span>
<span class="sd"> sparse_row_id_fn : A callback function</span>
<span class="sd"> The function takes `data_batch` as an input and returns a dict of</span>
<span class="sd"> str -&gt; NDArray. The resulting dict is used for pulling row_sparse</span>
<span class="sd"> parameters from the kvstore, where the str key is the name of the param,</span>
<span class="sd"> and the value is the row id of the param to pull.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">assert</span> <span class="bp">self</span><span class="o">.</span><span class="n">binded</span> <span class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span class="n">params_initialized</span>
<span class="k">if</span> <span class="n">reset</span><span class="p">:</span>
<span class="n">eval_data</span><span class="o">.</span><span class="n">reset</span><span class="p">()</span>
<span class="k">for</span> <span class="n">nbatch</span><span class="p">,</span> <span class="n">eval_batch</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">eval_data</span><span class="p">):</span>
<span class="k">if</span> <span class="n">num_batch</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span> <span class="ow">and</span> <span class="n">nbatch</span> <span class="o">==</span> <span class="n">num_batch</span><span class="p">:</span>
<span class="k">break</span>
<span class="bp">self</span><span class="o">.</span><span class="n">prepare</span><span class="p">(</span><span class="n">eval_batch</span><span class="p">,</span> <span class="n">sparse_row_id_fn</span><span class="o">=</span><span class="n">sparse_row_id_fn</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">forward</span><span class="p">(</span><span class="n">eval_batch</span><span class="p">,</span> <span class="n">is_train</span><span class="o">=</span><span class="kc">False</span><span class="p">)</span>
<span class="n">pad</span> <span class="o">=</span> <span class="n">eval_batch</span><span class="o">.</span><span class="n">pad</span>
<span class="n">outputs</span> <span class="o">=</span> <span class="p">[</span><span class="n">out</span><span class="p">[</span><span class="mi">0</span><span class="p">:</span><span class="n">out</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">-</span><span class="n">pad</span><span class="p">]</span> <span class="k">for</span> <span class="n">out</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_outputs</span><span class="p">()]</span>
<span class="k">yield</span> <span class="p">(</span><span class="n">outputs</span><span class="p">,</span> <span class="n">nbatch</span><span class="p">,</span> <span class="n">eval_batch</span><span class="p">)</span></div>
<div class="viewcode-block" id="BaseModule.predict"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.predict">[docs]</a> <span class="k">def</span> <span class="nf">predict</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">eval_data</span><span class="p">,</span> <span class="n">num_batch</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">merge_batches</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">reset</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span>
<span class="n">always_output_list</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">sparse_row_id_fn</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Runs prediction and collects the outputs.</span>
<span class="sd"> When `merge_batches` is ``True`` (by default), the return value will be a list</span>
<span class="sd"> ``[out1, out2, out3]``, where each element is formed by concatenating the outputs for</span>
<span class="sd"> all the mini-batches. When `always_output_list` is ``False`` (as by default),</span>
<span class="sd"> then in the case of a single output, `out1` is returned instead of ``[out1]``.</span>
<span class="sd"> When `merge_batches` is ``False``, the return value will be a nested list like</span>
<span class="sd"> ``[[out1_batch1, out2_batch1], [out1_batch2], ...]``. This mode is useful because</span>
<span class="sd"> in some cases (e.g. bucketing), the module does not necessarily produce the same</span>
<span class="sd"> number of outputs.</span>
<span class="sd"> The objects in the results have type `NDArray`. If you need to work with a numpy array,</span>
<span class="sd"> just call ``.asnumpy()`` on each `NDArray`.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> eval_data : DataIter or NDArray or numpy array</span>
<span class="sd"> Evaluation data to run prediction on.</span>
<span class="sd"> num_batch : int</span>
<span class="sd"> Defaults to ``None``, indicates running all the batches in the data iterator.</span>
<span class="sd"> merge_batches : bool</span>
<span class="sd"> Defaults to ``True``, see above for return values.</span>
<span class="sd"> reset : bool</span>
<span class="sd"> Defaults to ``True``, indicates whether we should reset the data iter before</span>
<span class="sd"> doing prediction.</span>
<span class="sd"> always_output_list : bool</span>
<span class="sd"> Defaults to ``False``, see above for return values.</span>
<span class="sd"> sparse_row_id_fn : A callback function</span>
<span class="sd"> The function takes `data_batch` as an input and returns a dict of</span>
<span class="sd"> str -&gt; NDArray. The resulting dict is used for pulling row_sparse</span>
<span class="sd"> parameters from the kvstore, where the str key is the name of the param,</span>
<span class="sd"> and the value is the row id of the param to pull.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> list of NDArray or list of list of NDArray</span>
<span class="sd"> Prediction results.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of using `predict` for prediction.</span>
<span class="sd"> &gt;&gt;&gt; # Predict on the first 10 batches of val_dataiter</span>
<span class="sd"> &gt;&gt;&gt; mod.predict(eval_data=val_dataiter, num_batch=10)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">assert</span> <span class="bp">self</span><span class="o">.</span><span class="n">binded</span> <span class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span class="n">params_initialized</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">eval_data</span><span class="p">,</span> <span class="p">(</span><span class="n">ndarray</span><span class="o">.</span><span class="n">NDArray</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">ndarray</span><span class="p">)):</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">eval_data</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">ndarray</span><span class="p">):</span>
<span class="n">eval_data</span> <span class="o">=</span> <span class="n">ndarray</span><span class="o">.</span><span class="n">array</span><span class="p">(</span><span class="n">eval_data</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">forward</span><span class="p">(</span><span class="n">DataBatch</span><span class="p">([</span><span class="n">eval_data</span><span class="p">]))</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_outputs</span><span class="p">()[</span><span class="mi">0</span><span class="p">]</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">eval_data</span><span class="p">,</span> <span class="n">DataIter</span><span class="p">):</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;eval_data must be of type NDArray or DataIter&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="n">reset</span><span class="p">:</span>
<span class="n">eval_data</span><span class="o">.</span><span class="n">reset</span><span class="p">()</span>
<span class="n">output_list</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">nbatch</span><span class="p">,</span> <span class="n">eval_batch</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">eval_data</span><span class="p">):</span>
<span class="k">if</span> <span class="n">num_batch</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span> <span class="ow">and</span> <span class="n">nbatch</span> <span class="o">==</span> <span class="n">num_batch</span><span class="p">:</span>
<span class="k">break</span>
<span class="bp">self</span><span class="o">.</span><span class="n">prepare</span><span class="p">(</span><span class="n">eval_batch</span><span class="p">,</span> <span class="n">sparse_row_id_fn</span><span class="o">=</span><span class="n">sparse_row_id_fn</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">forward</span><span class="p">(</span><span class="n">eval_batch</span><span class="p">,</span> <span class="n">is_train</span><span class="o">=</span><span class="kc">False</span><span class="p">)</span>
<span class="n">pad</span> <span class="o">=</span> <span class="n">eval_batch</span><span class="o">.</span><span class="n">pad</span>
<span class="n">outputs</span> <span class="o">=</span> <span class="p">[</span><span class="n">out</span><span class="p">[</span><span class="mi">0</span><span class="p">:</span><span class="n">out</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">-</span><span class="n">pad</span><span class="p">]</span><span class="o">.</span><span class="n">copy</span><span class="p">()</span> <span class="k">for</span> <span class="n">out</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_outputs</span><span class="p">()]</span>
<span class="n">output_list</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">outputs</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">output_list</span><span class="p">)</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">return</span> <span class="n">output_list</span>
<span class="k">if</span> <span class="n">merge_batches</span><span class="p">:</span>
<span class="n">num_outputs</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">output_list</span><span class="p">[</span><span class="mi">0</span><span class="p">])</span>
<span class="k">for</span> <span class="n">out</span> <span class="ow">in</span> <span class="n">output_list</span><span class="p">:</span>
<span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="n">out</span><span class="p">)</span> <span class="o">==</span> <span class="n">num_outputs</span><span class="p">,</span> \
<span class="s1">&#39;Cannot merge batches, as num of outputs is not the same &#39;</span> <span class="o">+</span> \
<span class="s1">&#39;in mini-batches. Maybe bucketing is used?&#39;</span>
<span class="n">output_list2</span> <span class="o">=</span> <span class="p">[</span><span class="n">ndarray</span><span class="o">.</span><span class="n">concatenate</span><span class="p">([</span><span class="n">out</span><span class="p">[</span><span class="n">i</span><span class="p">]</span> <span class="k">for</span> <span class="n">out</span> <span class="ow">in</span> <span class="n">output_list</span><span class="p">])</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">num_outputs</span><span class="p">)]</span>
<span class="k">if</span> <span class="n">num_outputs</span> <span class="o">==</span> <span class="mi">1</span> <span class="ow">and</span> <span class="ow">not</span> <span class="n">always_output_list</span><span class="p">:</span>
<span class="k">return</span> <span class="n">output_list2</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
<span class="k">return</span> <span class="n">output_list2</span>
<span class="k">return</span> <span class="n">output_list</span></div>
<div class="viewcode-block" id="BaseModule.fit"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.fit">[docs]</a> <span class="k">def</span> <span class="nf">fit</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">train_data</span><span class="p">,</span> <span class="n">eval_data</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">eval_metric</span><span class="o">=</span><span class="s1">&#39;acc&#39;</span><span class="p">,</span>
<span class="n">epoch_end_callback</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">batch_end_callback</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">kvstore</span><span class="o">=</span><span class="s1">&#39;local&#39;</span><span class="p">,</span>
<span class="n">optimizer</span><span class="o">=</span><span class="s1">&#39;sgd&#39;</span><span class="p">,</span> <span class="n">optimizer_params</span><span class="o">=</span><span class="p">((</span><span class="s1">&#39;learning_rate&#39;</span><span class="p">,</span> <span class="mf">0.01</span><span class="p">),),</span>
<span class="n">eval_end_callback</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">eval_batch_end_callback</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">initializer</span><span class="o">=</span><span class="n">Uniform</span><span class="p">(</span><span class="mf">0.01</span><span class="p">),</span>
<span class="n">arg_params</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">aux_params</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">allow_missing</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
<span class="n">force_rebind</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">force_init</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">begin_epoch</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">num_epoch</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">validation_metric</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">monitor</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">sparse_row_id_fn</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Trains the module parameters.</span>
<span class="sd"> Checkout `Module Tutorial &lt;https://mxnet.apache.org/api/python/tutorials/packages/module/index.html&gt;`_</span>
<span class="sd"> to see an end-to-end use-case.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> train_data : DataIter</span>
<span class="sd"> Train DataIter.</span>
<span class="sd"> eval_data : DataIter</span>
<span class="sd"> If not ``None``, will be used as validation set and the performance</span>
<span class="sd"> after each epoch will be evaluated.</span>
<span class="sd"> eval_metric : str or EvalMetric</span>
<span class="sd"> Defaults to &#39;accuracy&#39;. The performance measure used to display during training.</span>
<span class="sd"> Other possible predefined metrics are:</span>
<span class="sd"> &#39;ce&#39; (CrossEntropy), &#39;f1&#39;, &#39;mae&#39;, &#39;mse&#39;, &#39;rmse&#39;, &#39;top_k_accuracy&#39;.</span>
<span class="sd"> epoch_end_callback : function or list of functions</span>
<span class="sd"> Each callback will be called with the current `epoch`, `symbol`, `arg_params`</span>
<span class="sd"> and `aux_params`.</span>
<span class="sd"> batch_end_callback : function or list of function</span>
<span class="sd"> Each callback will be called with a `BatchEndParam`.</span>
<span class="sd"> kvstore : str or KVStore</span>
<span class="sd"> Defaults to &#39;local&#39;.</span>
<span class="sd"> optimizer : str or Optimizer</span>
<span class="sd"> Defaults to &#39;sgd&#39;.</span>
<span class="sd"> optimizer_params : dict</span>
<span class="sd"> Defaults to ``((&#39;learning_rate&#39;, 0.01),)``. The parameters for</span>
<span class="sd"> the optimizer constructor.</span>
<span class="sd"> The default value is not a dict, just to avoid pylint warning on dangerous</span>
<span class="sd"> default values.</span>
<span class="sd"> eval_end_callback : function or list of function</span>
<span class="sd"> These will be called at the end of each full evaluation, with the metrics over</span>
<span class="sd"> the entire evaluation set.</span>
<span class="sd"> eval_batch_end_callback : function or list of function</span>
<span class="sd"> These will be called at the end of each mini-batch during evaluation.</span>
<span class="sd"> initializer : Initializer</span>
<span class="sd"> The initializer is called to initialize the module parameters when they are</span>
<span class="sd"> not already initialized.</span>
<span class="sd"> arg_params : dict</span>
<span class="sd"> Defaults to ``None``, if not ``None``, should be existing parameters from a trained</span>
<span class="sd"> model or loaded from a checkpoint (previously saved model). In this case,</span>
<span class="sd"> the value here will be used to initialize the module parameters, unless they</span>
<span class="sd"> are already initialized by the user via a call to `init_params` or `fit`.</span>
<span class="sd"> `arg_params` has a higher priority than `initializer`.</span>
<span class="sd"> aux_params : dict</span>
<span class="sd"> Defaults to ``None``. Similar to `arg_params`, except for auxiliary states.</span>
<span class="sd"> allow_missing : bool</span>
<span class="sd"> Defaults to ``False``. Indicates whether to allow missing parameters when `arg_params`</span>
<span class="sd"> and `aux_params` are not ``None``. If this is ``True``, then the missing parameters</span>
<span class="sd"> will be initialized via the `initializer`.</span>
<span class="sd"> force_rebind : bool</span>
<span class="sd"> Defaults to ``False``. Whether to force rebinding the executors if already bound.</span>
<span class="sd"> force_init : bool</span>
<span class="sd"> Defaults to ``False``. Indicates whether to force initialization even if the</span>
<span class="sd"> parameters are already initialized.</span>
<span class="sd"> begin_epoch : int</span>
<span class="sd"> Defaults to 0. Indicates the starting epoch. Usually, if resumed from a</span>
<span class="sd"> checkpoint saved at a previous training phase at epoch N, then this value should be</span>
<span class="sd"> N+1.</span>
<span class="sd"> num_epoch : int</span>
<span class="sd"> Number of epochs for training.</span>
<span class="sd"> sparse_row_id_fn : A callback function</span>
<span class="sd"> The function takes `data_batch` as an input and returns a dict of</span>
<span class="sd"> str -&gt; NDArray. The resulting dict is used for pulling row_sparse</span>
<span class="sd"> parameters from the kvstore, where the str key is the name of the param,</span>
<span class="sd"> and the value is the row id of the param to pull.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of using fit for training.</span>
<span class="sd"> &gt;&gt;&gt; # Assume training dataIter and validation dataIter are ready</span>
<span class="sd"> &gt;&gt;&gt; # Assume loading a previously checkpointed model</span>
<span class="sd"> &gt;&gt;&gt; sym, arg_params, aux_params = mx.model.load_checkpoint(model_prefix, 3)</span>
<span class="sd"> &gt;&gt;&gt; mod.fit(train_data=train_dataiter, eval_data=val_dataiter, optimizer=&#39;sgd&#39;,</span>
<span class="sd"> ... optimizer_params={&#39;learning_rate&#39;:0.01, &#39;momentum&#39;: 0.9},</span>
<span class="sd"> ... arg_params=arg_params, aux_params=aux_params,</span>
<span class="sd"> ... eval_metric=&#39;acc&#39;, num_epoch=10, begin_epoch=3)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">assert</span> <span class="n">num_epoch</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">,</span> <span class="s1">&#39;please specify number of epochs&#39;</span>
<span class="bp">self</span><span class="o">.</span><span class="n">bind</span><span class="p">(</span><span class="n">data_shapes</span><span class="o">=</span><span class="n">train_data</span><span class="o">.</span><span class="n">provide_data</span><span class="p">,</span> <span class="n">label_shapes</span><span class="o">=</span><span class="n">train_data</span><span class="o">.</span><span class="n">provide_label</span><span class="p">,</span>
<span class="n">for_training</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">force_rebind</span><span class="o">=</span><span class="n">force_rebind</span><span class="p">)</span>
<span class="k">if</span> <span class="n">monitor</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">install_monitor</span><span class="p">(</span><span class="n">monitor</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">init_params</span><span class="p">(</span><span class="n">initializer</span><span class="o">=</span><span class="n">initializer</span><span class="p">,</span> <span class="n">arg_params</span><span class="o">=</span><span class="n">arg_params</span><span class="p">,</span> <span class="n">aux_params</span><span class="o">=</span><span class="n">aux_params</span><span class="p">,</span>
<span class="n">allow_missing</span><span class="o">=</span><span class="n">allow_missing</span><span class="p">,</span> <span class="n">force_init</span><span class="o">=</span><span class="n">force_init</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">init_optimizer</span><span class="p">(</span><span class="n">kvstore</span><span class="o">=</span><span class="n">kvstore</span><span class="p">,</span> <span class="n">optimizer</span><span class="o">=</span><span class="n">optimizer</span><span class="p">,</span>
<span class="n">optimizer_params</span><span class="o">=</span><span class="n">optimizer_params</span><span class="p">)</span>
<span class="k">if</span> <span class="n">validation_metric</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">validation_metric</span> <span class="o">=</span> <span class="n">eval_metric</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">eval_metric</span><span class="p">,</span> <span class="n">metric</span><span class="o">.</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="n">eval_metric</span> <span class="o">=</span> <span class="n">metric</span><span class="o">.</span><span class="n">create</span><span class="p">(</span><span class="n">eval_metric</span><span class="p">)</span>
<span class="c1">################################################################################</span>
<span class="c1"># training loop</span>
<span class="c1">################################################################################</span>
<span class="k">for</span> <span class="n">epoch</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">begin_epoch</span><span class="p">,</span> <span class="n">num_epoch</span><span class="p">):</span>
<span class="n">tic</span> <span class="o">=</span> <span class="n">time</span><span class="o">.</span><span class="n">time</span><span class="p">()</span>
<span class="n">eval_metric</span><span class="o">.</span><span class="n">reset</span><span class="p">()</span>
<span class="n">nbatch</span> <span class="o">=</span> <span class="mi">0</span>
<span class="n">data_iter</span> <span class="o">=</span> <span class="nb">iter</span><span class="p">(</span><span class="n">train_data</span><span class="p">)</span>
<span class="n">end_of_batch</span> <span class="o">=</span> <span class="kc">False</span>
<span class="n">next_data_batch</span> <span class="o">=</span> <span class="nb">next</span><span class="p">(</span><span class="n">data_iter</span><span class="p">)</span>
<span class="k">while</span> <span class="ow">not</span> <span class="n">end_of_batch</span><span class="p">:</span>
<span class="n">data_batch</span> <span class="o">=</span> <span class="n">next_data_batch</span>
<span class="k">if</span> <span class="n">monitor</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">monitor</span><span class="o">.</span><span class="n">tic</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">forward_backward</span><span class="p">(</span><span class="n">data_batch</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">update</span><span class="p">()</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">data_batch</span><span class="p">,</span> <span class="nb">list</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">update_metric</span><span class="p">(</span><span class="n">eval_metric</span><span class="p">,</span>
<span class="p">[</span><span class="n">db</span><span class="o">.</span><span class="n">label</span> <span class="k">for</span> <span class="n">db</span> <span class="ow">in</span> <span class="n">data_batch</span><span class="p">],</span>
<span class="n">pre_sliced</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">update_metric</span><span class="p">(</span><span class="n">eval_metric</span><span class="p">,</span> <span class="n">data_batch</span><span class="o">.</span><span class="n">label</span><span class="p">)</span>
<span class="k">try</span><span class="p">:</span>
<span class="c1"># pre fetch next batch</span>
<span class="n">next_data_batch</span> <span class="o">=</span> <span class="nb">next</span><span class="p">(</span><span class="n">data_iter</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">prepare</span><span class="p">(</span><span class="n">next_data_batch</span><span class="p">,</span> <span class="n">sparse_row_id_fn</span><span class="o">=</span><span class="n">sparse_row_id_fn</span><span class="p">)</span>
<span class="k">except</span> <span class="ne">StopIteration</span><span class="p">:</span>
<span class="n">end_of_batch</span> <span class="o">=</span> <span class="kc">True</span>
<span class="k">if</span> <span class="n">monitor</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">monitor</span><span class="o">.</span><span class="n">toc_print</span><span class="p">()</span>
<span class="k">if</span> <span class="n">end_of_batch</span><span class="p">:</span>
<span class="n">eval_name_vals</span> <span class="o">=</span> <span class="n">eval_metric</span><span class="o">.</span><span class="n">get_global_name_value</span><span class="p">()</span>
<span class="k">if</span> <span class="n">batch_end_callback</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">batch_end_params</span> <span class="o">=</span> <span class="n">BatchEndParam</span><span class="p">(</span><span class="n">epoch</span><span class="o">=</span><span class="n">epoch</span><span class="p">,</span> <span class="n">nbatch</span><span class="o">=</span><span class="n">nbatch</span><span class="p">,</span>
<span class="n">eval_metric</span><span class="o">=</span><span class="n">eval_metric</span><span class="p">,</span>
<span class="nb">locals</span><span class="o">=</span><span class="nb">locals</span><span class="p">())</span>
<span class="k">for</span> <span class="n">callback</span> <span class="ow">in</span> <span class="n">_as_list</span><span class="p">(</span><span class="n">batch_end_callback</span><span class="p">):</span>
<span class="n">callback</span><span class="p">(</span><span class="n">batch_end_params</span><span class="p">)</span>
<span class="n">nbatch</span> <span class="o">+=</span> <span class="mi">1</span>
<span class="c1"># one epoch of training is finished</span>
<span class="k">for</span> <span class="n">name</span><span class="p">,</span> <span class="n">val</span> <span class="ow">in</span> <span class="n">eval_name_vals</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">logger</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;Epoch[</span><span class="si">%d</span><span class="s1">] Train-</span><span class="si">%s</span><span class="s1">=</span><span class="si">%f</span><span class="s1">&#39;</span><span class="p">,</span> <span class="n">epoch</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="n">val</span><span class="p">)</span>
<span class="n">toc</span> <span class="o">=</span> <span class="n">time</span><span class="o">.</span><span class="n">time</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">logger</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;Epoch[</span><span class="si">%d</span><span class="s1">] Time cost=</span><span class="si">%.3f</span><span class="s1">&#39;</span><span class="p">,</span> <span class="n">epoch</span><span class="p">,</span> <span class="p">(</span><span class="n">toc</span><span class="o">-</span><span class="n">tic</span><span class="p">))</span>
<span class="c1"># sync aux params across devices</span>
<span class="n">arg_params</span><span class="p">,</span> <span class="n">aux_params</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_params</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">set_params</span><span class="p">(</span><span class="n">arg_params</span><span class="p">,</span> <span class="n">aux_params</span><span class="p">)</span>
<span class="k">if</span> <span class="n">epoch_end_callback</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">for</span> <span class="n">callback</span> <span class="ow">in</span> <span class="n">_as_list</span><span class="p">(</span><span class="n">epoch_end_callback</span><span class="p">):</span>
<span class="n">callback</span><span class="p">(</span><span class="n">epoch</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">symbol</span><span class="p">,</span> <span class="n">arg_params</span><span class="p">,</span> <span class="n">aux_params</span><span class="p">)</span>
<span class="c1">#----------------------------------------</span>
<span class="c1"># evaluation on validation set</span>
<span class="k">if</span> <span class="n">eval_data</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">res</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">score</span><span class="p">(</span><span class="n">eval_data</span><span class="p">,</span> <span class="n">validation_metric</span><span class="p">,</span>
<span class="n">score_end_callback</span><span class="o">=</span><span class="n">eval_end_callback</span><span class="p">,</span>
<span class="n">batch_end_callback</span><span class="o">=</span><span class="n">eval_batch_end_callback</span><span class="p">,</span> <span class="n">epoch</span><span class="o">=</span><span class="n">epoch</span><span class="p">)</span>
<span class="c1">#TODO: pull this into default</span>
<span class="k">for</span> <span class="n">name</span><span class="p">,</span> <span class="n">val</span> <span class="ow">in</span> <span class="n">res</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">logger</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">&#39;Epoch[</span><span class="si">%d</span><span class="s1">] Validation-</span><span class="si">%s</span><span class="s1">=</span><span class="si">%f</span><span class="s1">&#39;</span><span class="p">,</span> <span class="n">epoch</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="n">val</span><span class="p">)</span>
<span class="c1"># end of 1 epoch, reset the data-iter for another epoch</span>
<span class="n">train_data</span><span class="o">.</span><span class="n">reset</span><span class="p">()</span></div>
<span class="c1">################################################################################</span>
<span class="c1"># Symbol information</span>
<span class="c1">################################################################################</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">data_names</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;A list of names for data required by this module.&quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">output_names</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;A list of names for the outputs of this module.&quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span>
<span class="c1">################################################################################</span>
<span class="c1"># Input/Output information</span>
<span class="c1">################################################################################</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">data_shapes</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;A list of (name, shape) pairs specifying the data inputs to this module.&quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">label_shapes</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;A list of (name, shape) pairs specifying the label inputs to this module.</span>
<span class="sd"> If this module does not accept labels -- either it is a module without loss</span>
<span class="sd"> function, or it is not bound for training, then this should return an empty</span>
<span class="sd"> list ``[]``.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">output_shapes</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;A list of (name, shape) pairs specifying the outputs of this module.&quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span>
<span class="c1">################################################################################</span>
<span class="c1"># Parameters of a module</span>
<span class="c1">################################################################################</span>
<div class="viewcode-block" id="BaseModule.get_params"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.get_params">[docs]</a> <span class="k">def</span> <span class="nf">get_params</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Gets parameters, those are potentially copies of the actual parameters used</span>
<span class="sd"> to do computation on the device.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> ``(arg_params, aux_params)``</span>
<span class="sd"> A pair of dictionaries each mapping parameter names to NDArray values.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of getting module parameters.</span>
<span class="sd"> &gt;&gt;&gt; print mod.get_params()</span>
<span class="sd"> ({&#39;fc2_weight&#39;: &lt;NDArray 64x128 @cpu(0)&gt;, &#39;fc1_weight&#39;: &lt;NDArray 128x100 @cpu(0)&gt;,</span>
<span class="sd"> &#39;fc3_bias&#39;: &lt;NDArray 10 @cpu(0)&gt;, &#39;fc3_weight&#39;: &lt;NDArray 10x64 @cpu(0)&gt;,</span>
<span class="sd"> &#39;fc2_bias&#39;: &lt;NDArray 64 @cpu(0)&gt;, &#39;fc1_bias&#39;: &lt;NDArray 128 @cpu(0)&gt;}, {})</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span></div>
<div class="viewcode-block" id="BaseModule.init_params"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.init_params">[docs]</a> <span class="k">def</span> <span class="nf">init_params</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">initializer</span><span class="o">=</span><span class="n">Uniform</span><span class="p">(</span><span class="mf">0.01</span><span class="p">),</span> <span class="n">arg_params</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">aux_params</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">allow_missing</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">force_init</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">allow_extra</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Initializes the parameters and auxiliary states.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> initializer : Initializer</span>
<span class="sd"> Called to initialize parameters if needed.</span>
<span class="sd"> arg_params : dict</span>
<span class="sd"> If not ``None``, should be a dictionary of existing `arg_params`. Initialization</span>
<span class="sd"> will be copied from that.</span>
<span class="sd"> aux_params : dict</span>
<span class="sd"> If not ``None``, should be a dictionary of existing `aux_params`. Initialization</span>
<span class="sd"> will be copied from that.</span>
<span class="sd"> allow_missing : bool</span>
<span class="sd"> If ``True``, params could contain missing values, and the initializer will be</span>
<span class="sd"> called to fill those missing params.</span>
<span class="sd"> force_init : bool</span>
<span class="sd"> If ``True``, `force_init` will force re-initialize even if already initialized.</span>
<span class="sd"> allow_extra : boolean, optional</span>
<span class="sd"> Whether allow extra parameters that are not needed by symbol.</span>
<span class="sd"> If this is True, no error will be thrown when arg_params or aux_params</span>
<span class="sd"> contain extra parameters that is not needed by the executor.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of initializing module parameters.</span>
<span class="sd"> &gt;&gt;&gt; mod.init_params()</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span></div>
<div class="viewcode-block" id="BaseModule.set_params"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.set_params">[docs]</a> <span class="k">def</span> <span class="nf">set_params</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">arg_params</span><span class="p">,</span> <span class="n">aux_params</span><span class="p">,</span> <span class="n">allow_missing</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">force_init</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span>
<span class="n">allow_extra</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Assigns parameter and aux state values.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> arg_params : dict</span>
<span class="sd"> Dictionary of name to value (`NDArray`) mapping.</span>
<span class="sd"> aux_params : dict</span>
<span class="sd"> Dictionary of name to value (`NDArray`) mapping.</span>
<span class="sd"> allow_missing : bool</span>
<span class="sd"> If ``True``, params could contain missing values, and the initializer will be</span>
<span class="sd"> called to fill those missing params.</span>
<span class="sd"> force_init : bool</span>
<span class="sd"> If ``True``, will force re-initialize even if already initialized.</span>
<span class="sd"> allow_extra : boolean, optional</span>
<span class="sd"> Whether allow extra parameters that are not needed by symbol.</span>
<span class="sd"> If this is True, no error will be thrown when arg_params or aux_params</span>
<span class="sd"> contain extra parameters that is not needed by the executor.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of setting module parameters.</span>
<span class="sd"> &gt;&gt;&gt; sym, arg_params, aux_params = mx.model.load_checkpoint(model_prefix, n_epoch_load)</span>
<span class="sd"> &gt;&gt;&gt; mod.set_params(arg_params=arg_params, aux_params=aux_params)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="bp">self</span><span class="o">.</span><span class="n">init_params</span><span class="p">(</span><span class="n">initializer</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">arg_params</span><span class="o">=</span><span class="n">arg_params</span><span class="p">,</span> <span class="n">aux_params</span><span class="o">=</span><span class="n">aux_params</span><span class="p">,</span>
<span class="n">allow_missing</span><span class="o">=</span><span class="n">allow_missing</span><span class="p">,</span> <span class="n">force_init</span><span class="o">=</span><span class="n">force_init</span><span class="p">,</span>
<span class="n">allow_extra</span><span class="o">=</span><span class="n">allow_extra</span><span class="p">)</span></div>
<div class="viewcode-block" id="BaseModule.save_params"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.save_params">[docs]</a> <span class="k">def</span> <span class="nf">save_params</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">fname</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Saves model parameters to file.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> fname : str</span>
<span class="sd"> Path to output param file.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of saving module parameters.</span>
<span class="sd"> &gt;&gt;&gt; mod.save_params(&#39;myfile&#39;)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">arg_params</span><span class="p">,</span> <span class="n">aux_params</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_params</span><span class="p">()</span>
<span class="n">save_dict</span> <span class="o">=</span> <span class="p">{(</span><span class="s1">&#39;arg:</span><span class="si">%s</span><span class="s1">&#39;</span> <span class="o">%</span> <span class="n">k</span><span class="p">)</span> <span class="p">:</span> <span class="n">v</span><span class="o">.</span><span class="n">as_in_context</span><span class="p">(</span><span class="n">cpu</span><span class="p">())</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">arg_params</span><span class="o">.</span><span class="n">items</span><span class="p">()}</span>
<span class="n">save_dict</span><span class="o">.</span><span class="n">update</span><span class="p">({(</span><span class="s1">&#39;aux:</span><span class="si">%s</span><span class="s1">&#39;</span> <span class="o">%</span> <span class="n">k</span><span class="p">)</span> <span class="p">:</span> <span class="n">v</span><span class="o">.</span><span class="n">as_in_context</span><span class="p">(</span><span class="n">cpu</span><span class="p">())</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">aux_params</span><span class="o">.</span><span class="n">items</span><span class="p">()})</span>
<span class="n">ndarray</span><span class="o">.</span><span class="n">save</span><span class="p">(</span><span class="n">fname</span><span class="p">,</span> <span class="n">save_dict</span><span class="p">)</span></div>
<div class="viewcode-block" id="BaseModule.load_params"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.load_params">[docs]</a> <span class="k">def</span> <span class="nf">load_params</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">fname</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Loads model parameters from file.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> fname : str</span>
<span class="sd"> Path to input param file.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of loading module parameters.</span>
<span class="sd"> &gt;&gt;&gt; mod.load_params(&#39;myfile&#39;)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">save_dict</span> <span class="o">=</span> <span class="n">ndarray</span><span class="o">.</span><span class="n">load</span><span class="p">(</span><span class="n">fname</span><span class="p">)</span>
<span class="n">arg_params</span> <span class="o">=</span> <span class="p">{}</span>
<span class="n">aux_params</span> <span class="o">=</span> <span class="p">{}</span>
<span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">value</span> <span class="ow">in</span> <span class="n">save_dict</span><span class="o">.</span><span class="n">items</span><span class="p">():</span>
<span class="n">arg_type</span><span class="p">,</span> <span class="n">name</span> <span class="o">=</span> <span class="n">k</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">&#39;:&#39;</span><span class="p">,</span> <span class="mi">1</span><span class="p">)</span>
<span class="k">if</span> <span class="n">arg_type</span> <span class="o">==</span> <span class="s1">&#39;arg&#39;</span><span class="p">:</span>
<span class="n">arg_params</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="o">=</span> <span class="n">value</span>
<span class="k">elif</span> <span class="n">arg_type</span> <span class="o">==</span> <span class="s1">&#39;aux&#39;</span><span class="p">:</span>
<span class="n">aux_params</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="o">=</span> <span class="n">value</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;Invalid param file &quot;</span> <span class="o">+</span> <span class="n">fname</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">set_params</span><span class="p">(</span><span class="n">arg_params</span><span class="p">,</span> <span class="n">aux_params</span><span class="p">)</span></div>
<div class="viewcode-block" id="BaseModule.get_states"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.get_states">[docs]</a> <span class="k">def</span> <span class="nf">get_states</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">merge_multi_context</span><span class="o">=</span><span class="kc">True</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Gets states from all devices</span>
<span class="sd"> If `merge_multi_context` is ``True``, returns output of form ``[out1, out2]``.</span>
<span class="sd"> Otherwise, it returns output of the form</span>
<span class="sd"> ``[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]``.</span>
<span class="sd"> All output elements are `NDArray`.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> merge_multi_context : bool</span>
<span class="sd"> Defaults to ``True``. In the case when data-parallelism is used, the states</span>
<span class="sd"> will be collected from multiple devices. A ``True`` value indicates that we</span>
<span class="sd"> should merge the collected results so that they look like from a single</span>
<span class="sd"> executor.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> A list of ``NDArray`` or a list of list of ``NDArray``.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">assert</span> <span class="bp">self</span><span class="o">.</span><span class="n">binded</span> <span class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span class="n">params_initialized</span>
<span class="k">assert</span> <span class="ow">not</span> <span class="n">merge_multi_context</span>
<span class="k">return</span> <span class="p">[]</span></div>
<div class="viewcode-block" id="BaseModule.set_states"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.set_states">[docs]</a> <span class="k">def</span> <span class="nf">set_states</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">states</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">value</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Sets value for states. Only one of states &amp; value can be specified.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> states : list of list of NDArray</span>
<span class="sd"> Source states arrays formatted like</span>
<span class="sd"> ``[[state1_dev1, state1_dev2], [state2_dev1, state2_dev2]]``.</span>
<span class="sd"> value : number</span>
<span class="sd"> A single scalar value for all state arrays.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">assert</span> <span class="bp">self</span><span class="o">.</span><span class="n">binded</span> <span class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span class="n">params_initialized</span>
<span class="k">assert</span> <span class="ow">not</span> <span class="n">states</span> <span class="ow">and</span> <span class="ow">not</span> <span class="n">value</span></div>
<div class="viewcode-block" id="BaseModule.install_monitor"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.install_monitor">[docs]</a> <span class="k">def</span> <span class="nf">install_monitor</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">mon</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Installs monitor on all executors.&quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span></div>
<span class="c1">################################################################################</span>
<span class="c1"># Computations</span>
<span class="c1">################################################################################</span>
<span class="c1"># pylint: disable=unused-argument</span>
<div class="viewcode-block" id="BaseModule.prepare"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.prepare">[docs]</a> <span class="k">def</span> <span class="nf">prepare</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">data_batch</span><span class="p">,</span> <span class="n">sparse_row_id_fn</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="sd">&#39;&#39;&#39;Prepares the module for processing a data batch.</span>
<span class="sd"> Usually involves switching bucket and reshaping.</span>
<span class="sd"> For modules that contain `row_sparse` parameters in KVStore,</span>
<span class="sd"> it prepares the `row_sparse` parameters based on the sparse_row_id_fn.</span>
<span class="sd"> When KVStore is used to update parameters for multi-device or multi-machine training,</span>
<span class="sd"> a copy of the parameters are stored in KVStore. Note that for `row_sparse` parameters,</span>
<span class="sd"> the `update()` updates the copy of parameters in KVStore, but doesn&#39;t broadcast</span>
<span class="sd"> the updated parameters to all devices / machines. The `prepare` function is used to</span>
<span class="sd"> broadcast `row_sparse` parameters with the next batch of data.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> data_batch : DataBatch</span>
<span class="sd"> The current batch of data for forward computation.</span>
<span class="sd"> sparse_row_id_fn : A callback function</span>
<span class="sd"> The function takes `data_batch` as an input and returns a dict of</span>
<span class="sd"> str -&gt; NDArray. The resulting dict is used for pulling row_sparse</span>
<span class="sd"> parameters from the kvstore, where the str key is the name of the param,</span>
<span class="sd"> and the value is the row id of the param to pull.</span>
<span class="sd"> &#39;&#39;&#39;</span>
<span class="k">if</span> <span class="n">sparse_row_id_fn</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="ne">UserWarning</span><span class="p">(</span><span class="s2">&quot;sparse_row_id_fn is not invoked for BaseModule.&quot;</span><span class="p">))</span></div>
<span class="c1"># pylint: enable=unused-argument</span>
<div class="viewcode-block" id="BaseModule.forward"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.forward">[docs]</a> <span class="k">def</span> <span class="nf">forward</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">data_batch</span><span class="p">,</span> <span class="n">is_train</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Forward computation. It supports data batches with different shapes, such as</span>
<span class="sd"> different batch sizes or different image sizes.</span>
<span class="sd"> If reshaping of data batch relates to modification of symbol or module, such as</span>
<span class="sd"> changing image layout ordering or switching from training to predicting, module</span>
<span class="sd"> rebinding is required.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> data_batch : DataBatch</span>
<span class="sd"> Could be anything with similar API implemented.</span>
<span class="sd"> is_train : bool</span>
<span class="sd"> Default is ``None``, which means `is_train` takes the value of ``self.for_training``.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; import mxnet as mx</span>
<span class="sd"> &gt;&gt;&gt; from collections import namedtuple</span>
<span class="sd"> &gt;&gt;&gt; Batch = namedtuple(&#39;Batch&#39;, [&#39;data&#39;])</span>
<span class="sd"> &gt;&gt;&gt; data = mx.sym.Variable(&#39;data&#39;)</span>
<span class="sd"> &gt;&gt;&gt; out = data * 2</span>
<span class="sd"> &gt;&gt;&gt; mod = mx.mod.Module(symbol=out, label_names=None)</span>
<span class="sd"> &gt;&gt;&gt; mod.bind(data_shapes=[(&#39;data&#39;, (1, 10))])</span>
<span class="sd"> &gt;&gt;&gt; mod.init_params()</span>
<span class="sd"> &gt;&gt;&gt; data1 = [mx.nd.ones((1, 10))]</span>
<span class="sd"> &gt;&gt;&gt; mod.forward(Batch(data1))</span>
<span class="sd"> &gt;&gt;&gt; print mod.get_outputs()[0].asnumpy()</span>
<span class="sd"> [[ 2. 2. 2. 2. 2. 2. 2. 2. 2. 2.]]</span>
<span class="sd"> &gt;&gt;&gt; # Forward with data batch of different shape</span>
<span class="sd"> &gt;&gt;&gt; data2 = [mx.nd.ones((3, 5))]</span>
<span class="sd"> &gt;&gt;&gt; mod.forward(Batch(data2))</span>
<span class="sd"> &gt;&gt;&gt; print mod.get_outputs()[0].asnumpy()</span>
<span class="sd"> [[ 2. 2. 2. 2. 2.]</span>
<span class="sd"> [ 2. 2. 2. 2. 2.]</span>
<span class="sd"> [ 2. 2. 2. 2. 2.]]</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span></div>
<div class="viewcode-block" id="BaseModule.backward"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.backward">[docs]</a> <span class="k">def</span> <span class="nf">backward</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">out_grads</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Backward computation.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> out_grads : NDArray or list of NDArray, optional</span>
<span class="sd"> Gradient on the outputs to be propagated back.</span>
<span class="sd"> This parameter is only needed when bind is called</span>
<span class="sd"> on outputs that are not a loss function.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of backward computation.</span>
<span class="sd"> &gt;&gt;&gt; mod.backward()</span>
<span class="sd"> &gt;&gt;&gt; print mod.get_input_grads()[0].asnumpy()</span>
<span class="sd"> [[[ 1.10182791e-05 5.12257748e-06 4.01927764e-06 8.32566820e-06</span>
<span class="sd"> -1.59775993e-06 7.24269375e-06 7.28067835e-06 -1.65902311e-05</span>
<span class="sd"> 5.46342608e-06 8.44196393e-07]</span>
<span class="sd"> ...]]</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span></div>
<div class="viewcode-block" id="BaseModule.get_outputs"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.get_outputs">[docs]</a> <span class="k">def</span> <span class="nf">get_outputs</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">merge_multi_context</span><span class="o">=</span><span class="kc">True</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Gets outputs of the previous forward computation.</span>
<span class="sd"> If `merge_multi_context` is ``True``, it is like ``[out1, out2]``. Otherwise,</span>
<span class="sd"> it returns out put of form ``[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]``.</span>
<span class="sd"> All the output elements have type `NDArray`. When `merge_multi_context` is ``False``,</span>
<span class="sd"> those `NDArray` instances might live on different devices.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> merge_multi_context : bool</span>
<span class="sd"> Defaults to ``True``. In the case when data-parallelism is used, the outputs</span>
<span class="sd"> will be collected from multiple devices. A ``True`` value indicates that we</span>
<span class="sd"> should merge the collected results so that they look like from a single</span>
<span class="sd"> executor.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> list of `NDArray` or list of list of `NDArray`.</span>
<span class="sd"> Output</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of getting forward output.</span>
<span class="sd"> &gt;&gt;&gt; print mod.get_outputs()[0].asnumpy()</span>
<span class="sd"> [[ 0.09999977 0.10000153 0.10000716 0.10000195 0.09999853 0.09999743</span>
<span class="sd"> 0.10000272 0.10000113 0.09999088 0.09999888]]</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span></div>
<div class="viewcode-block" id="BaseModule.get_input_grads"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.get_input_grads">[docs]</a> <span class="k">def</span> <span class="nf">get_input_grads</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">merge_multi_context</span><span class="o">=</span><span class="kc">True</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Gets the gradients to the inputs, computed in the previous backward computation.</span>
<span class="sd"> If `merge_multi_context` is ``True``, it is like ``[grad1, grad2]``. Otherwise, it</span>
<span class="sd"> is like ``[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]``. All the output</span>
<span class="sd"> elements have type `NDArray`. When `merge_multi_context` is ``False``, those `NDArray`</span>
<span class="sd"> instances might live on different devices.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> merge_multi_context : bool</span>
<span class="sd"> Defaults to ``True``. In the case when data-parallelism is used, the gradients</span>
<span class="sd"> will be collected from multiple devices. A ``True`` value indicates that we</span>
<span class="sd"> should merge the collected results so that they look like from a single</span>
<span class="sd"> executor.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> list of NDArray or list of list of NDArray</span>
<span class="sd"> Input gradients.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of getting input gradients.</span>
<span class="sd"> &gt;&gt;&gt; print mod.get_input_grads()[0].asnumpy()</span>
<span class="sd"> [[[ 1.10182791e-05 5.12257748e-06 4.01927764e-06 8.32566820e-06</span>
<span class="sd"> -1.59775993e-06 7.24269375e-06 7.28067835e-06 -1.65902311e-05</span>
<span class="sd"> 5.46342608e-06 8.44196393e-07]</span>
<span class="sd"> ...]]</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span></div>
<div class="viewcode-block" id="BaseModule.update"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Updates parameters according to the installed optimizer and the gradients computed</span>
<span class="sd"> in the previous forward-backward batch.</span>
<span class="sd"> When KVStore is used to update parameters for multi-device or multi-machine training,</span>
<span class="sd"> a copy of the parameters are stored in KVStore. Note that for `row_sparse` parameters,</span>
<span class="sd"> this function does update the copy of parameters in KVStore, but doesn&#39;t broadcast the</span>
<span class="sd"> updated parameters to all devices / machines. Please call `prepare` to broadcast</span>
<span class="sd"> `row_sparse` parameters with the next batch of data.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of updating module parameters.</span>
<span class="sd"> &gt;&gt;&gt; mod.init_optimizer(kvstore=&#39;local&#39;, optimizer=&#39;sgd&#39;,</span>
<span class="sd"> ... optimizer_params=((&#39;learning_rate&#39;, 0.01), ))</span>
<span class="sd"> &gt;&gt;&gt; mod.backward()</span>
<span class="sd"> &gt;&gt;&gt; mod.update()</span>
<span class="sd"> &gt;&gt;&gt; print mod.get_params()[0][&#39;fc3_weight&#39;].asnumpy()</span>
<span class="sd"> [[ 5.86930104e-03 5.28078526e-03 -8.88729654e-03 -1.08308345e-03</span>
<span class="sd"> 6.13054074e-03 4.27560415e-03 1.53817423e-03 4.62131854e-03</span>
<span class="sd"> 4.69872449e-03 -2.42400169e-03 9.94111411e-04 1.12386420e-03</span>
<span class="sd"> ...]]</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span></div>
<div class="viewcode-block" id="BaseModule.update_metric"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.update_metric">[docs]</a> <span class="k">def</span> <span class="nf">update_metric</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">eval_metric</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">pre_sliced</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Evaluates and accumulates evaluation metric on outputs of the last forward</span>
<span class="sd"> computation.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> eval_metric : EvalMetric</span>
<span class="sd"> Evaluation metric to use.</span>
<span class="sd"> labels : list of NDArray if `pre_sliced` parameter is set to `False`,</span>
<span class="sd"> list of lists of NDArray otherwise. Typically `data_batch.label`.</span>
<span class="sd"> pre_sliced: bool</span>
<span class="sd"> Whether the labels are already sliced per device (default: False).</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of updating evaluation metric.</span>
<span class="sd"> &gt;&gt;&gt; mod.forward(data_batch)</span>
<span class="sd"> &gt;&gt;&gt; mod.update_metric(metric, data_batch.label)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span></div>
<span class="c1">################################################################################</span>
<span class="c1"># module setup</span>
<span class="c1">################################################################################</span>
<div class="viewcode-block" id="BaseModule.bind"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.bind">[docs]</a> <span class="k">def</span> <span class="nf">bind</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">data_shapes</span><span class="p">,</span> <span class="n">label_shapes</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">for_training</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span>
<span class="n">inputs_need_grad</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">force_rebind</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">shared_module</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">grad_req</span><span class="o">=</span><span class="s1">&#39;write&#39;</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Binds the symbols to construct executors. This is necessary before one</span>
<span class="sd"> can perform computation with the module.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> data_shapes : list of (str, tuple) or DataDesc objects</span>
<span class="sd"> Typically is ``data_iter.provide_data``. Can also be a list of</span>
<span class="sd"> (data name, data shape).</span>
<span class="sd"> label_shapes : list of (str, tuple) or DataDesc objects</span>
<span class="sd"> Typically is ``data_iter.provide_label``. Can also be a list of</span>
<span class="sd"> (label name, label shape).</span>
<span class="sd"> for_training : bool</span>
<span class="sd"> Default is ``True``. Whether the executors should be bind for training.</span>
<span class="sd"> inputs_need_grad : bool</span>
<span class="sd"> Default is ``False``. Whether the gradients to the input data need to be computed.</span>
<span class="sd"> Typically this is not needed. But this might be needed when implementing composition</span>
<span class="sd"> of modules.</span>
<span class="sd"> force_rebind : bool</span>
<span class="sd"> Default is ``False``. This function does nothing if the executors are already</span>
<span class="sd"> bound. But with this ``True``, the executors will be forced to rebind.</span>
<span class="sd"> shared_module : Module</span>
<span class="sd"> Default is ``None``. This is used in bucketing. When not ``None``, the shared module</span>
<span class="sd"> essentially corresponds to a different bucket -- a module with different symbol</span>
<span class="sd"> but with the same sets of parameters (e.g. unrolled RNNs with different lengths).</span>
<span class="sd"> grad_req : str, list of str, dict of str to str</span>
<span class="sd"> Requirement for gradient accumulation. Can be &#39;write&#39;, &#39;add&#39;, or &#39;null&#39;</span>
<span class="sd"> (default to &#39;write&#39;).</span>
<span class="sd"> Can be specified globally (str) or for each argument (list, dict).</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of binding symbols.</span>
<span class="sd"> &gt;&gt;&gt; mod.bind(data_shapes=[(&#39;data&#39;, (1, 10, 10))])</span>
<span class="sd"> &gt;&gt;&gt; # Assume train_iter is already created.</span>
<span class="sd"> &gt;&gt;&gt; mod.bind(data_shapes=train_iter.provide_data, label_shapes=train_iter.provide_label)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span></div>
<div class="viewcode-block" id="BaseModule.init_optimizer"><a class="viewcode-back" href="../../../api/module/index.html#mxnet.module.BaseModule.init_optimizer">[docs]</a> <span class="k">def</span> <span class="nf">init_optimizer</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">kvstore</span><span class="o">=</span><span class="s1">&#39;local&#39;</span><span class="p">,</span> <span class="n">optimizer</span><span class="o">=</span><span class="s1">&#39;sgd&#39;</span><span class="p">,</span>
<span class="n">optimizer_params</span><span class="o">=</span><span class="p">((</span><span class="s1">&#39;learning_rate&#39;</span><span class="p">,</span> <span class="mf">0.01</span><span class="p">),),</span> <span class="n">force_init</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Installs and initializes optimizers, as well as initialize kvstore for</span>
<span class="sd"> distributed training</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> kvstore : str or KVStore</span>
<span class="sd"> Defaults to `&#39;local&#39;`.</span>
<span class="sd"> optimizer : str or Optimizer</span>
<span class="sd"> Defaults to `&#39;sgd&#39;`.</span>
<span class="sd"> optimizer_params : dict</span>
<span class="sd"> Defaults to ``((&#39;learning_rate&#39;, 0.01),)``. The default value is not a dictionary,</span>
<span class="sd"> just to avoid pylint warning of dangerous default values.</span>
<span class="sd"> force_init : bool</span>
<span class="sd"> Defaults to ``False``, indicates whether to force re-initializing an optimizer</span>
<span class="sd"> if it is already installed.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; # An example of initializing optimizer.</span>
<span class="sd"> &gt;&gt;&gt; mod.init_optimizer(optimizer=&#39;sgd&#39;, optimizer_params=((&#39;learning_rate&#39;, 0.005),))</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span></div>
<span class="c1">################################################################################</span>
<span class="c1"># misc</span>
<span class="c1">################################################################################</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">symbol</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">&quot;&quot;&quot;Gets the symbol associated with this module.</span>
<span class="sd"> Except for `Module`, for other types of modules (e.g. `BucketingModule`), this</span>
<span class="sd"> property might not be a constant throughout its life time. Some modules might</span>
<span class="sd"> not even be associated with any symbols.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_symbol</span></div>
</pre></div>
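<p>The abstract methods documented above are implemented by concrete subclasses such as <code>mxnet.module.Module</code>. The following is a minimal sketch of how the <code>bind</code>, <code>init_params</code> and <code>init_optimizer</code> steps fit together; the toy symbol, shapes and random data are assumptions chosen only for illustration and are not part of this source file.</p>
<div class="highlight"><pre>
# Minimal end-to-end sketch of the BaseModule workflow using the concrete
# mx.mod.Module class (MXNet 1.x symbolic API).
import mxnet as mx
import numpy as np

# A tiny feed-forward symbol.
data = mx.sym.Variable('data')
fc = mx.sym.FullyConnected(data, num_hidden=2)
out = mx.sym.SoftmaxOutput(fc, name='softmax')

# Random toy data wrapped in a DataIter.
x = np.random.uniform(size=(100, 10))
y = np.random.randint(0, 2, size=(100,))
train_iter = mx.io.NDArrayIter(x, y, batch_size=10, label_name='softmax_label')

# The bind / init_params / init_optimizer sequence described above.
mod = mx.mod.Module(out, context=mx.cpu())
mod.bind(data_shapes=train_iter.provide_data,
         label_shapes=train_iter.provide_label,
         for_training=True)
mod.init_params(initializer=mx.init.Xavier())
mod.init_optimizer(optimizer='sgd', optimizer_params=(('learning_rate', 0.1),))

# One manual pass over the data: forward, backward, parameter update.
for batch in train_iter:
    mod.forward(batch, is_train=True)
    mod.backward()
    mod.update()
</pre></div>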
<hr class="feedback-hr-top" />
<div class="feedback-container">
<div class="feedback-question">Did this page help you?</div>
<div class="feedback-answer-container">
<div class="feedback-answer yes-link" data-response="yes">Yes</div>
<div class="feedback-answer no-link" data-response="no">No</div>
</div>
<div class="feedback-thank-you">Thanks for your feedback!</div>
</div>
<hr class="feedback-hr-bottom" />
</div>
<div class="side-doc-outline">
<div class="side-doc-outline--content">
</div>
</div>
<div class="clearer"></div>
</div><div class="pagenation">
</div>
<footer class="site-footer h-card">
<div class="wrapper">
<div class="row">
<div class="col-4">
<h4 class="footer-category-title">Resources</h4>
<ul class="contact-list">
<li><a class="u-email" href="mailto:dev@mxnet.apache.org">Dev list</a></li>
<li><a class="u-email" href="mailto:user@mxnet.apache.org">User mailing list</a></li>
<li><a href="https://cwiki.apache.org/confluence/display/MXNET/Apache+MXNet+Home">Developer Wiki</a></li>
<li><a href="https://issues.apache.org/jira/projects/MXNET/issues">Jira Tracker</a></li>
<li><a href="https://github.com/apache/incubator-mxnet/labels/Roadmap">Github Roadmap</a></li>
<li><a href="https://medium.com/apache-mxnet">Blog</a></li>
<li><a href="https://discuss.mxnet.io">Forum</a></li>
<li><a href="/community/contribute">Contribute</a></li>
</ul>
</div>
<div class="col-4"><ul class="social-media-list"><li><a href="https://github.com/apache/incubator-mxnet"><svg class="svg-icon"><use xlink:href="../../../_static/minima-social-icons.svg#github"></use></svg> <span class="username">apache/incubator-mxnet</span></a></li><li><a href="https://www.twitter.com/apachemxnet"><svg class="svg-icon"><use xlink:href="../../../_static/minima-social-icons.svg#twitter"></use></svg> <span class="username">apachemxnet</span></a></li><li><a href="https://youtube.com/apachemxnet"><svg class="svg-icon"><use xlink:href="../../../_static/minima-social-icons.svg#youtube"></use></svg> <span class="username">apachemxnet</span></a></li></ul>
</div>
<div class="col-4 footer-text">
<p>A flexible and efficient library for deep learning.</p>
</div>
</div>
</div>
</footer>
<footer class="site-footer2">
<div class="wrapper">
<div class="row">
<div class="col-3">
<img src="../../../_static/apache_incubator_logo.png" class="footer-logo col-2">
</div>
<div class="footer-bottom-warning col-9">
<p>Apache MXNet is an effort undergoing incubation at <a href="http://www.apache.org/">The Apache Software Foundation</a> (ASF), <span style="font-weight:bold">sponsored by the <i>Apache Incubator</i></span>. Incubation is required
of all newly accepted projects until a further review indicates that the infrastructure,
communications, and decision making process have stabilized in a manner consistent with other
successful ASF projects. While incubation status is not necessarily a reflection of the completeness
or stability of the code, it does indicate that the project has yet to be fully endorsed by the ASF.
</p><p>"Copyright © 2017-2018, The Apache Software Foundation Apache MXNet, MXNet, Apache, the Apache
feather, and the Apache MXNet project logo are either registered trademarks or trademarks of the
Apache Software Foundation."</p>
</div>
</div>
</div>
</footer>
</body>
</html>