<!-- blob: 94230efeaa31a46c3560ab34755d385bfa264b61 [file] [log] [blame] -->
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml" lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<style>
/* Styles for the "Apache" and version-selector dropdown menus in the site header. */

/* Container that anchors the absolutely-positioned menu panel. */
.dropdown {
position: relative;
display: inline-block;
}
/* Menu panel: hidden by default, revealed on hover (see rule below). */
.dropdown-content {
display: none;
position: absolute;
background-color: #f9f9f9;
min-width: 160px;
box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2);
padding: 12px 16px;
z-index: 1; /* stack the panel above neighbouring header content */
text-align: left;
}
/* Show the panel while the pointer is over the dropdown container. */
.dropdown:hover .dropdown-content {
display: block;
}
/* Highlight an option on hover. */
.dropdown-option:hover {
color: #FF4500;
}
/* Option representing the currently viewed version. */
.dropdown-option-active {
color: #FF4500;
font-weight: lighter;
}
/* Ordinary selectable option. */
.dropdown-option {
color: #000000;
font-weight: lighter;
}
/* Visible trigger/label of the dropdown. */
.dropdown-header {
color: #FFFFFF;
display: inline-flex;
}
/* Caret icon shown next to the dropdown label. */
.dropdown-caret {
width: 18px;
}
.dropdown-caret-path {
fill: #FFFFFF;
}
</style>
<title>mxnet.symbol.contrib &#8212; Apache MXNet documentation</title>
<link rel="stylesheet" href="../../../_static/basic.css" />
<link rel="stylesheet" href="../../../_static/pygments.css" />
<link rel="stylesheet" href="../../../_static/mxnet.css" />
<link rel="stylesheet" href="../../../_static/material-design-lite-1.3.0/material.blue-deep_orange.min.css" />
<link rel="stylesheet" href="../../../_static/sphinx_materialdesign_theme.css" />
<link rel="stylesheet" href="../../../_static/fontawesome/all.css" />
<link rel="stylesheet" href="../../../_static/fonts.css" />
<link rel="stylesheet" href="../../../_static/feedback.css" />
<script id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
<script src="../../../_static/jquery.js"></script>
<script src="../../../_static/underscore.js"></script>
<script src="../../../_static/doctools.js"></script>
<script src="../../../_static/language_data.js"></script>
<script src="../../../_static/matomo_analytics.js"></script>
<script src="../../../_static/autodoc.js"></script>
<script crossorigin="anonymous" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"></script>
<script async src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/latest.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script type="text/x-mathjax-config">MathJax.Hub.Config({"tex2jax": {"inlineMath": [["$", "$"], ["\\(", "\\)"]], "processEscapes": true, "ignoreClass": "document", "processClass": "math|output_area"}})</script>
<link rel="shortcut icon" href="../../../_static/mxnet-icon.png"/>
<link rel="index" title="Index" href="../../../genindex.html" />
<link rel="search" title="Search" href="../../../search.html" />
</head>
<body><header class="site-header" role="banner">
<div class="wrapper">
<a class="site-title" rel="author" href="/versions/1.9.1/"><img
src="../../../_static/mxnet_logo.png" class="site-header-logo" alt="Apache MXNet"></a>
<nav class="site-nav">
<input type="checkbox" id="nav-trigger" class="nav-trigger"/>
<label for="nav-trigger">
<span class="menu-icon">
<svg viewBox="0 0 18 15" width="18px" height="15px" aria-hidden="true">
<path d="M18,1.484c0,0.82-0.665,1.484-1.484,1.484H1.484C0.665,2.969,0,2.304,0,1.484l0,0C0,0.665,0.665,0,1.484,0 h15.032C17.335,0,18,0.665,18,1.484L18,1.484z M18,7.516C18,8.335,17.335,9,16.516,9H1.484C0.665,9,0,8.335,0,7.516l0,0 c0-0.82,0.665-1.484,1.484-1.484h15.032C17.335,6.031,18,6.696,18,7.516L18,7.516z M18,13.516C18,14.335,17.335,15,16.516,15H1.484 C0.665,15,0,14.335,0,13.516l0,0c0-0.82,0.665-1.483,1.484-1.483h15.032C17.335,12.031,18,12.695,18,13.516L18,13.516z"/>
</svg>
</span>
</label>
<div class="trigger">
<a class="page-link" href="/versions/1.9.1/get_started">Get Started</a>
<a class="page-link" href="/versions/1.9.1/features">Features</a>
<a class="page-link" href="/versions/1.9.1/ecosystem">Ecosystem</a>
<a class="page-link page-current" href="/versions/1.9.1/api">Docs &amp; Tutorials</a>
<a class="page-link" href="/versions/1.9.1/trusted_by">Trusted By</a>
<a class="page-link" href="https://github.com/apache/mxnet">GitHub</a>
<div class="dropdown" style="min-width:100px">
<span class="dropdown-header">Apache
<svg class="dropdown-caret icon icon-caret-bottom" viewBox="0 0 32 32" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
</span>
<div class="dropdown-content" style="min-width:250px">
<a href="https://www.apache.org/foundation/">Apache Software Foundation</a>
<a href="https://incubator.apache.org/">Apache Incubator</a>
<a href="https://www.apache.org/licenses/">License</a>
<a href="/versions/1.9.1/api/faq/security.html">Security</a>
<a href="https://privacy.apache.org/policies/privacy-policy-public.html">Privacy</a>
<a href="https://www.apache.org/events/current-event">Events</a>
<a href="https://www.apache.org/foundation/sponsorship.html">Sponsorship</a>
<a href="https://www.apache.org/foundation/thanks.html">Thanks</a>
</div>
</div>
<div class="dropdown">
<span class="dropdown-header">1.9.1
<svg class="dropdown-caret icon icon-caret-bottom" viewBox="0 0 32 32" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
</span>
<div class="dropdown-content">
<a class="dropdown-option" href="/">master</a><br>
<a class="dropdown-option-active" href="/versions/1.9.1/">1.9.1</a><br>
<a class="dropdown-option" href="/versions/1.8.0/">1.8.0</a><br>
<a class="dropdown-option" href="/versions/1.7.0/">1.7.0</a><br>
<a class="dropdown-option" href="/versions/1.6.0/">1.6.0</a><br>
<a class="dropdown-option" href="/versions/1.5.0/">1.5.0</a><br>
<a class="dropdown-option" href="/versions/1.4.1/">1.4.1</a><br>
<a class="dropdown-option" href="/versions/1.3.1/">1.3.1</a><br>
<a class="dropdown-option" href="/versions/1.2.1/">1.2.1</a><br>
<a class="dropdown-option" href="/versions/1.1.0/">1.1.0</a><br>
<a class="dropdown-option" href="/versions/1.0.0/">1.0.0</a><br>
<a class="dropdown-option" href="/versions/0.12.1/">0.12.1</a><br>
<a class="dropdown-option" href="/versions/0.11.0/">0.11.0</a>
</div>
</div>
</div>
</nav>
</div>
</header>
<div class="mdl-layout mdl-js-layout mdl-layout--fixed-header mdl-layout--fixed-drawer"><header class="mdl-layout__header mdl-layout__header--waterfall ">
<div class="mdl-layout__header-row">
<nav class="mdl-navigation breadcrumb">
<a class="mdl-navigation__link" href="../../index.html">Module code</a><i class="material-icons">navigate_next</i>
<a class="mdl-navigation__link is-active">mxnet.symbol.contrib</a>
</nav>
<div class="mdl-layout-spacer"></div>
<nav class="mdl-navigation">
<form class="form-inline pull-sm-right" action="../../../search.html" method="get">
<div class="mdl-textfield mdl-js-textfield mdl-textfield--expandable mdl-textfield--floating-label mdl-textfield--align-right">
<label id="quick-search-icon" class="mdl-button mdl-js-button mdl-button--icon" for="waterfall-exp">
<i class="material-icons">search</i>
</label>
<div class="mdl-textfield__expandable-holder">
<input class="mdl-textfield__input" type="text" name="q" id="waterfall-exp" placeholder="Search" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</div>
</div>
<div class="mdl-tooltip" data-mdl-for="quick-search-icon">
Quick search
</div>
</form>
</nav>
</div>
<div class="mdl-layout__header-row header-links">
<div class="mdl-layout-spacer"></div>
<nav class="mdl-navigation">
</nav>
</div>
</header><header class="mdl-layout__drawer">
<div class="globaltoc">
<span class="mdl-layout-title toc">Table Of Contents</span>
<nav class="mdl-navigation">
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../../tutorials/index.html">Python Tutorials</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/getting-started/index.html">Getting Started</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/index.html">Crash Course</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/1-ndarray.html">Manipulate data with <code class="docutils literal notranslate"><span class="pre">ndarray</span></code></a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/2-nn.html">Create a neural network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/3-autograd.html">Automatic differentiation with <code class="docutils literal notranslate"><span class="pre">autograd</span></code></a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/4-train.html">Train the neural network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/5-predict.html">Predict with a pre-trained model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/6-use_gpus.html">Use GPUs</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/to-mxnet/index.html">Moving to MXNet from Other Frameworks</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/to-mxnet/pytorch.html">PyTorch vs Apache MXNet</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/gluon_from_experiment_to_deployment.html">Gluon: from experiment to deployment</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/logistic_regression_explained.html">Logistic regression explained</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/image/mnist.html">MNIST</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/packages/index.html">Packages</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/autograd/index.html">Automatic Differentiation</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/gluon/index.html">Gluon</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/index.html">Blocks</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/custom-layer.html">Custom Layers</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/custom_layer_beginners.html">Customer Layers (Beginners)</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/hybridize.html">Hybridize</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/init.html">Initialization</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/naming.html">Parameter and Block Naming</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/nn.html">Layers and Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/parameters.html">Parameter Management</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/save_load_params.html">Saving and Loading Gluon Models</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/activations/activations.html">Activation Blocks</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/data/index.html">Data Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html">Image Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html#Spatial-Augmentation">Spatial Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html#Color-Augmentation">Color Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html#Composed-Augmentations">Composed Augmentations</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html">Gluon <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s and <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Using-own-data-with-included-Datasets">Using own data with included <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Using-own-data-with-custom-Datasets">Using own data with custom <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Appendix:-Upgrading-from-Module-DataIter-to-Gluon-DataLoader">Appendix: Upgrading from Module <code class="docutils literal notranslate"><span class="pre">DataIter</span></code> to Gluon <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/image/index.html">Image Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/image-augmentation.html">Image Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/info_gan.html">Image similarity search with InfoGAN</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/mnist.html">Handwritten Digit Recognition</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/pretrained_models.html">Using pre-trained models in MXNet</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/index.html">Losses</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/custom-loss.html">Custom Loss Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/kl_divergence.html">Kullback-Leibler (KL) Divergence</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/loss.html">Loss functions</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/text/index.html">Text Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/text/gnmt.html">Google Neural Machine Translation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/text/transformer.html">Machine Translation with Transformer</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/training/index.html">Training</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/fit_api_tutorial.html">MXNet Gluon Fit API</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/trainer.html">Trainer</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/index.html">Learning Rates</a><ul>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_finder.html">Learning Rate Finder</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_schedules.html">Learning Rate Schedules</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_schedules_advanced.html">Advanced Learning Rate Schedules</a></li>
</ul>
</li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/normalization/index.html">Normalization Blocks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/kvstore/index.html">KVStore</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/kvstore/kvstore.html">Distributed Key-Value Store</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/ndarray/index.html">NDArray</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/01-ndarray-intro.html">An Intro: Manipulate Data the MXNet Way with NDArray</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/02-ndarray-operations.html">NDArray Operations</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/03-ndarray-contexts.html">NDArray Contexts</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/gotchas_numpy_in_mxnet.html">Gotchas using NumPy in Apache MXNet</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/index.html">Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/csr.html">CSRNDArray - NDArray in Compressed Sparse Row Storage Format</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/row_sparse.html">RowSparseNDArray - NDArray for Sparse Gradient Updates</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/train.html">Train a Linear Regression Model with Sparse Symbols</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/train_gluon.html">Sparse NDArrays with Gluon</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/onnx/index.html">ONNX</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/fine_tuning_gluon.html">Fine-tuning an ONNX model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/inference_on_onnx_model.html">Running inference on MXNet/Gluon from an ONNX model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/super_resolution.html">Importing an ONNX model into MXNet</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/deploy/export/onnx.html">Export ONNX Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/optimizer/index.html">Optimizers</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/viz/index.html">Visualization</a><ul>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/visualize_graph">Visualize networks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/performance/index.html">Performance</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/performance/compression/index.html">Compression</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/compression/int8.html">Deploy with int-8</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/float16">Float16</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/gradient_compression">Gradient Compression</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/int8_inference.html">GluonCV with Quantized Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/performance/backend/index.html">Accelerated Backend Tools</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/index.html">Intel MKL-DNN</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/mkldnn_quantization.html">Quantize with MKL-DNN backend</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/mkldnn_quantization.html#Improving-accuracy-with-Intel®-Neural-Compressor">Improving accuracy with Intel® Neural Compressor</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/mkldnn_readme.html">Install MXNet with MKL-DNN</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/tensorrt/index.html">TensorRT</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/tensorrt/tensorrt.html">Optimizing Deep Learning Computation Graphs with TensorRT</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/tvm.html">Use TVM</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/profiler.html">Profiling MXNet Models</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/amp.html">Using AMP: Automatic Mixed Precision</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/deploy/index.html">Deployment</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/export/index.html">Export</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/export/onnx.html">Exporting to ONNX format</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/export_network.html">Export Gluon CV Models</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/blocks/save_load_params.html">Save / Load Parameters</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/inference/index.html">Inference</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/cpp.html">Deploy into C++</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/image_classification_jetson.html">Image Classification using pretrained ResNet-50 model on Jetson module</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/scala.html">Deploy into a Java or Scala Environment</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/wine_detector.html">Real-time Object Detection with MXNet On The Raspberry Pi</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/index.html">Run on AWS</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/use_ec2.html">Run on an EC2 Instance</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/use_sagemaker.html">Run on Amazon SageMaker</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/cloud.html">MXNet on the Cloud</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/extend/index.html">Extend</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/extend/custom_layer.html">Custom Layers</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/extend/customop.html">Custom Numpy Operators</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/new_op">New Operator Creation</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/add_op_in_backend">New Operator in MXNet Backend</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../../api/index.html">Python API</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../api/ndarray/index.html">mxnet.ndarray</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/ndarray.html">ndarray</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/contrib/index.html">ndarray.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/image/index.html">ndarray.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/linalg/index.html">ndarray.linalg</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/op/index.html">ndarray.op</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/random/index.html">ndarray.random</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/register/index.html">ndarray.register</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/sparse/index.html">ndarray.sparse</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/utils/index.html">ndarray.utils</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/gluon/index.html">mxnet.gluon</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/block.html">gluon.Block</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/hybrid_block.html">gluon.HybridBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/symbol_block.html">gluon.SymbolBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/constant.html">gluon.Constant</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/parameter.html">gluon.Parameter</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/parameter_dict.html">gluon.ParameterDict</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/trainer.html">gluon.Trainer</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/contrib/index.html">gluon.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/data/index.html">gluon.data</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/gluon/data/vision/index.html">data.vision</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/gluon/data/vision/datasets/index.html">vision.datasets</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/gluon/data/vision/transforms/index.html">vision.transforms</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/loss/index.html">gluon.loss</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/model_zoo/index.html">gluon.model_zoo.vision</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/nn/index.html">gluon.nn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/rnn/index.html">gluon.rnn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/utils/index.html">gluon.utils</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/autograd/index.html">mxnet.autograd</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/initializer/index.html">mxnet.initializer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/optimizer/index.html">mxnet.optimizer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/lr_scheduler/index.html">mxnet.lr_scheduler</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/metric/index.html">mxnet.metric</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore/index.html">mxnet.kvstore</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/symbol/index.html">mxnet.symbol</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/symbol.html">symbol</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/contrib/index.html">symbol.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/image/index.html">symbol.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/linalg/index.html">symbol.linalg</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/op/index.html">symbol.op</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/random/index.html">symbol.random</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/register/index.html">symbol.register</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/sparse/index.html">symbol.sparse</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/module/index.html">mxnet.module</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/contrib/index.html">mxnet.contrib</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/autograd/index.html">contrib.autograd</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/io/index.html">contrib.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/ndarray/index.html">contrib.ndarray</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/onnx/index.html">contrib.onnx</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/quantization/index.html">contrib.quantization</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/symbol/index.html">contrib.symbol</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/tensorboard/index.html">contrib.tensorboard</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/tensorrt/index.html">contrib.tensorrt</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/text/index.html">contrib.text</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/mxnet/index.html">mxnet</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/attribute/index.html">mxnet.attribute</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/base/index.html">mxnet.base</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/callback/index.html">mxnet.callback</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/context/index.html">mxnet.context</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/engine/index.html">mxnet.engine</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/executor/index.html">mxnet.executor</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/executor_manager/index.html">mxnet.executor_manager</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/image/index.html">mxnet.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/io/index.html">mxnet.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/kvstore_server/index.html">mxnet.kvstore_server</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/libinfo/index.html">mxnet.libinfo</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/log/index.html">mxnet.log</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/model/index.html">mxnet.model</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/monitor/index.html">mxnet.monitor</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/name/index.html">mxnet.name</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/notebook/index.html">mxnet.notebook</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/operator/index.html">mxnet.operator</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/profiler/index.html">mxnet.profiler</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/random/index.html">mxnet.random</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/recordio/index.html">mxnet.recordio</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/registry/index.html">mxnet.registry</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/rtc/index.html">mxnet.rtc</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/runtime/index.html">mxnet.runtime</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/test_utils/index.html">mxnet.test_utils</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/torch/index.html">mxnet.torch</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/util/index.html">mxnet.util</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/visualization/index.html">mxnet.visualization</a></li>
</ul>
</li>
</ul>
</li>
</ul>
</nav>
</div>
</header>
<main class="mdl-layout__content" tabindex="0">
<script type="text/javascript" src="../../../_static/sphinx_materialdesign_theme.js"></script>
<script type="text/javascript" src="../../../_static/feedback.js"></script>
<header class="mdl-layout__drawer">
<div class="globaltoc">
<span class="mdl-layout-title toc">Table Of Contents</span>
<nav class="mdl-navigation">
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../../tutorials/index.html">Python Tutorials</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/getting-started/index.html">Getting Started</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/index.html">Crash Course</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/1-ndarray.html">Manipulate data with <code class="docutils literal notranslate"><span class="pre">ndarray</span></code></a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/2-nn.html">Create a neural network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/3-autograd.html">Automatic differentiation with <code class="docutils literal notranslate"><span class="pre">autograd</span></code></a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/4-train.html">Train the neural network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/5-predict.html">Predict with a pre-trained model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/6-use_gpus.html">Use GPUs</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/to-mxnet/index.html">Moving to MXNet from Other Frameworks</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/to-mxnet/pytorch.html">PyTorch vs Apache MXNet</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/gluon_from_experiment_to_deployment.html">Gluon: from experiment to deployment</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/logistic_regression_explained.html">Logistic regression explained</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/image/mnist.html">MNIST</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/packages/index.html">Packages</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/autograd/index.html">Automatic Differentiation</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/gluon/index.html">Gluon</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/index.html">Blocks</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/custom-layer.html">Custom Layers</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/custom_layer_beginners.html">Custom Layers (Beginners)</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/hybridize.html">Hybridize</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/init.html">Initialization</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/naming.html">Parameter and Block Naming</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/nn.html">Layers and Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/parameters.html">Parameter Management</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/save_load_params.html">Saving and Loading Gluon Models</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/activations/activations.html">Activation Blocks</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/data/index.html">Data Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html">Image Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html#Spatial-Augmentation">Spatial Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html#Color-Augmentation">Color Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html#Composed-Augmentations">Composed Augmentations</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html">Gluon <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s and <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Using-own-data-with-included-Datasets">Using own data with included <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Using-own-data-with-custom-Datasets">Using own data with custom <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Appendix:-Upgrading-from-Module-DataIter-to-Gluon-DataLoader">Appendix: Upgrading from Module <code class="docutils literal notranslate"><span class="pre">DataIter</span></code> to Gluon <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/image/index.html">Image Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/image-augmentation.html">Image Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/info_gan.html">Image similarity search with InfoGAN</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/mnist.html">Handwritten Digit Recognition</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/pretrained_models.html">Using pre-trained models in MXNet</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/index.html">Losses</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/custom-loss.html">Custom Loss Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/kl_divergence.html">Kullback-Leibler (KL) Divergence</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/loss.html">Loss functions</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/text/index.html">Text Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/text/gnmt.html">Google Neural Machine Translation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/text/transformer.html">Machine Translation with Transformer</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/training/index.html">Training</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/fit_api_tutorial.html">MXNet Gluon Fit API</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/trainer.html">Trainer</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/index.html">Learning Rates</a><ul>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_finder.html">Learning Rate Finder</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_schedules.html">Learning Rate Schedules</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_schedules_advanced.html">Advanced Learning Rate Schedules</a></li>
</ul>
</li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/normalization/index.html">Normalization Blocks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/kvstore/index.html">KVStore</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/kvstore/kvstore.html">Distributed Key-Value Store</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/ndarray/index.html">NDArray</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/01-ndarray-intro.html">An Intro: Manipulate Data the MXNet Way with NDArray</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/02-ndarray-operations.html">NDArray Operations</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/03-ndarray-contexts.html">NDArray Contexts</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/gotchas_numpy_in_mxnet.html">Gotchas using NumPy in Apache MXNet</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/index.html">Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/csr.html">CSRNDArray - NDArray in Compressed Sparse Row Storage Format</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/row_sparse.html">RowSparseNDArray - NDArray for Sparse Gradient Updates</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/train.html">Train a Linear Regression Model with Sparse Symbols</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/ndarray/sparse/train_gluon.html">Sparse NDArrays with Gluon</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/onnx/index.html">ONNX</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/fine_tuning_gluon.html">Fine-tuning an ONNX model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/inference_on_onnx_model.html">Running inference on MXNet/Gluon from an ONNX model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/super_resolution.html">Importing an ONNX model into MXNet</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/deploy/export/onnx.html">Export ONNX Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/optimizer/index.html">Optimizers</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/viz/index.html">Visualization</a><ul>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/visualize_graph">Visualize networks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/performance/index.html">Performance</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/performance/compression/index.html">Compression</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/compression/int8.html">Deploy with int-8</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/float16">Float16</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/gradient_compression">Gradient Compression</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/int8_inference.html">GluonCV with Quantized Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/performance/backend/index.html">Accelerated Backend Tools</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/index.html">Intel MKL-DNN</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/mkldnn_quantization.html">Quantize with MKL-DNN backend</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/mkldnn_quantization.html#Improving-accuracy-with-Intel®-Neural-Compressor">Improving accuracy with Intel® Neural Compressor</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/mkldnn/mkldnn_readme.html">Install MXNet with MKL-DNN</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/tensorrt/index.html">TensorRT</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/tensorrt/tensorrt.html">Optimizing Deep Learning Computation Graphs with TensorRT</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/tvm.html">Use TVM</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/profiler.html">Profiling MXNet Models</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/amp.html">Using AMP: Automatic Mixed Precision</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/deploy/index.html">Deployment</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/export/index.html">Export</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/export/onnx.html">Exporting to ONNX format</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/export_network.html">Export Gluon CV Models</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/blocks/save_load_params.html">Save / Load Parameters</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/inference/index.html">Inference</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/cpp.html">Deploy into C++</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/image_classification_jetson.html">Image Classification using pretrained ResNet-50 model on Jetson module</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/scala.html">Deploy into a Java or Scala Environment</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/wine_detector.html">Real-time Object Detection with MXNet On The Raspberry Pi</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/index.html">Run on AWS</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/use_ec2.html">Run on an EC2 Instance</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/use_sagemaker.html">Run on Amazon SageMaker</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/cloud.html">MXNet on the Cloud</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/extend/index.html">Extend</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/extend/custom_layer.html">Custom Layers</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/extend/customop.html">Custom Numpy Operators</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/new_op">New Operator Creation</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/add_op_in_backend">New Operator in MXNet Backend</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../../api/index.html">Python API</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../api/ndarray/index.html">mxnet.ndarray</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/ndarray.html">ndarray</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/contrib/index.html">ndarray.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/image/index.html">ndarray.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/linalg/index.html">ndarray.linalg</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/op/index.html">ndarray.op</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/random/index.html">ndarray.random</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/register/index.html">ndarray.register</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/sparse/index.html">ndarray.sparse</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/ndarray/utils/index.html">ndarray.utils</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/gluon/index.html">mxnet.gluon</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/block.html">gluon.Block</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/hybrid_block.html">gluon.HybridBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/symbol_block.html">gluon.SymbolBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/constant.html">gluon.Constant</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/parameter.html">gluon.Parameter</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/parameter_dict.html">gluon.ParameterDict</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/trainer.html">gluon.Trainer</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/contrib/index.html">gluon.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/data/index.html">gluon.data</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/gluon/data/vision/index.html">data.vision</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/gluon/data/vision/datasets/index.html">vision.datasets</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/gluon/data/vision/transforms/index.html">vision.transforms</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/loss/index.html">gluon.loss</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/model_zoo/index.html">gluon.model_zoo.vision</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/nn/index.html">gluon.nn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/rnn/index.html">gluon.rnn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/utils/index.html">gluon.utils</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/autograd/index.html">mxnet.autograd</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/initializer/index.html">mxnet.initializer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/optimizer/index.html">mxnet.optimizer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/lr_scheduler/index.html">mxnet.lr_scheduler</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/metric/index.html">mxnet.metric</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore/index.html">mxnet.kvstore</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/symbol/index.html">mxnet.symbol</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/symbol.html">symbol</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/contrib/index.html">symbol.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/image/index.html">symbol.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/linalg/index.html">symbol.linalg</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/op/index.html">symbol.op</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/random/index.html">symbol.random</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/register/index.html">symbol.register</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/symbol/sparse/index.html">symbol.sparse</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/module/index.html">mxnet.module</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/contrib/index.html">mxnet.contrib</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/autograd/index.html">contrib.autograd</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/io/index.html">contrib.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/ndarray/index.html">contrib.ndarray</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/onnx/index.html">contrib.onnx</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/quantization/index.html">contrib.quantization</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/symbol/index.html">contrib.symbol</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/tensorboard/index.html">contrib.tensorboard</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/tensorrt/index.html">contrib.tensorrt</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/text/index.html">contrib.text</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/mxnet/index.html">mxnet</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/attribute/index.html">mxnet.attribute</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/base/index.html">mxnet.base</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/callback/index.html">mxnet.callback</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/context/index.html">mxnet.context</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/engine/index.html">mxnet.engine</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/executor/index.html">mxnet.executor</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/executor_manager/index.html">mxnet.executor_manager</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/image/index.html">mxnet.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/io/index.html">mxnet.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/kvstore_server/index.html">mxnet.kvstore_server</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/libinfo/index.html">mxnet.libinfo</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/log/index.html">mxnet.log</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/model/index.html">mxnet.model</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/monitor/index.html">mxnet.monitor</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/name/index.html">mxnet.name</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/notebook/index.html">mxnet.notebook</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/operator/index.html">mxnet.operator</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/profiler/index.html">mxnet.profiler</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/random/index.html">mxnet.random</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/recordio/index.html">mxnet.recordio</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/registry/index.html">mxnet.registry</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/rtc/index.html">mxnet.rtc</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/runtime/index.html">mxnet.runtime</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/test_utils/index.html">mxnet.test_utils</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/torch/index.html">mxnet.torch</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/util/index.html">mxnet.util</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/mxnet/visualization/index.html">mxnet.visualization</a></li>
</ul>
</li>
</ul>
</li>
</ul>
</nav>
</div>
</header>
<div class="document">
<div class="page-content" role="main">
<h1>Source code for mxnet.symbol.contrib</h1><div class="highlight"><pre>
<span></span><span class="c1"># Licensed to the Apache Software Foundation (ASF) under one</span>
<span class="c1"># or more contributor license agreements. See the NOTICE file</span>
<span class="c1"># distributed with this work for additional information</span>
<span class="c1"># regarding copyright ownership. The ASF licenses this file</span>
<span class="c1"># to you under the Apache License, Version 2.0 (the</span>
<span class="c1"># &quot;License&quot;); you may not use this file except in compliance</span>
<span class="c1"># with the License. You may obtain a copy of the License at</span>
<span class="c1">#</span>
<span class="c1"># http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="c1">#</span>
<span class="c1"># Unless required by applicable law or agreed to in writing,</span>
<span class="c1"># software distributed under the License is distributed on an</span>
<span class="c1"># &quot;AS IS&quot; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY</span>
<span class="c1"># KIND, either express or implied. See the License for the</span>
<span class="c1"># specific language governing permissions and limitations</span>
<span class="c1"># under the License.</span>
<span class="c1"># coding: utf-8</span>
<span class="c1"># pylint: disable=wildcard-import, unused-wildcard-import,redefined-outer-name</span>
<span class="sd">&quot;&quot;&quot;Contrib Symbol API of MXNet.&quot;&quot;&quot;</span>
<span class="kn">import</span> <span class="nn">math</span>
<span class="kn">import</span> <span class="nn">ctypes</span>
<span class="kn">import</span> <span class="nn">copy</span>
<span class="kn">from</span> <span class="nn">.random</span> <span class="kn">import</span> <span class="n">uniform</span>
<span class="kn">from</span> <span class="nn">.symbol</span> <span class="kn">import</span> <span class="n">Symbol</span>
<span class="k">try</span><span class="p">:</span>
<span class="kn">from</span> <span class="nn">.gen_contrib</span> <span class="kn">import</span> <span class="o">*</span>
<span class="k">except</span> <span class="ne">ImportError</span><span class="p">:</span>
<span class="k">pass</span>
<span class="kn">from</span> <span class="nn">.</span> <span class="kn">import</span> <span class="n">symbol</span>
<span class="kn">from</span> <span class="nn">..base</span> <span class="kn">import</span> <span class="n">_LIB</span><span class="p">,</span> <span class="n">check_call</span>
<span class="kn">from</span> <span class="nn">..base</span> <span class="kn">import</span> <span class="n">SymbolHandle</span><span class="p">,</span> <span class="n">_as_list</span>
<span class="kn">from</span> <span class="nn">..attribute</span> <span class="kn">import</span> <span class="n">AttrScope</span>
<span class="n">__all__</span> <span class="o">=</span> <span class="p">[</span><span class="s2">&quot;rand_zipfian&quot;</span><span class="p">,</span> <span class="s2">&quot;foreach&quot;</span><span class="p">,</span> <span class="s2">&quot;while_loop&quot;</span><span class="p">,</span> <span class="s2">&quot;cond&quot;</span><span class="p">]</span>
<div class="viewcode-block" id="rand_zipfian"><a class="viewcode-back" href="../../../api/symbol/contrib/index.html#mxnet.symbol.contrib.rand_zipfian">[docs]</a><span class="k">def</span> <span class="nf">rand_zipfian</span><span class="p">(</span><span class="n">true_classes</span><span class="p">,</span> <span class="n">num_sampled</span><span class="p">,</span> <span class="n">range_max</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Draw random samples from an approximately log-uniform or Zipfian distribution.</span>
<span class="sd"> This operation randomly samples *num_sampled* candidates in the range of integers [0, range_max).</span>
<span class="sd"> The elements of sampled_candidates are drawn with replacement from the base distribution.</span>
<span class="sd"> The base distribution for this operator is an approximately log-uniform or Zipfian distribution:</span>
<span class="sd"> P(class) = (log(class + 2) - log(class + 1)) / log(range_max + 1)</span>
<span class="sd"> This sampler is useful when the true classes approximately follow such a distribution.</span>
<span class="sd"> For example, if the classes represent words in a lexicon sorted in decreasing order of \</span>
<span class="sd"> frequency. If your classes are not ordered by decreasing frequency, do not use this op.</span>
<span class="sd"> Additionally, it also returns the number of times each of the \</span>
<span class="sd"> true classes and the sampled classes is expected to occur.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> true_classes : Symbol</span>
<span class="sd"> The target classes in 1-D.</span>
<span class="sd"> num_sampled: int</span>
<span class="sd"> The number of classes to randomly sample.</span>
<span class="sd"> range_max: int</span>
<span class="sd"> The number of possible classes.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> samples: Symbol</span>
<span class="sd"> The sampled candidate classes in 1-D `int64` dtype.</span>
<span class="sd"> expected_count_true: Symbol</span>
<span class="sd"> The expected count for true classes in 1-D `float64` dtype.</span>
<span class="sd"> expected_count_sample: Symbol</span>
<span class="sd"> The expected count for sampled candidates in 1-D `float64` dtype.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; true_cls = mx.sym.Variable(&#39;true_cls&#39;)</span>
<span class="sd"> &gt;&gt;&gt; samples, exp_count_true, exp_count_sample = mx.sym.contrib.rand_zipfian(true_cls, 4, 5)</span>
<span class="sd"> &gt;&gt;&gt; samples.eval(true_cls=mx.nd.array([3]))[0].asnumpy()</span>
<span class="sd"> array([1, 3, 3, 3])</span>
<span class="sd"> &gt;&gt;&gt; exp_count_true.eval(true_cls=mx.nd.array([3]))[0].asnumpy()</span>
<span class="sd"> array([0.12453879])</span>
<span class="sd"> &gt;&gt;&gt; exp_count_sample.eval(true_cls=mx.nd.array([3]))[0].asnumpy()</span>
<span class="sd"> array([0.22629439, 0.12453879, 0.12453879, 0.12453879])</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">assert</span><span class="p">(</span><span class="nb">isinstance</span><span class="p">(</span><span class="n">true_classes</span><span class="p">,</span> <span class="n">Symbol</span><span class="p">)),</span> <span class="s2">&quot;unexpected type </span><span class="si">%s</span><span class="s2">&quot;</span> <span class="o">%</span> <span class="nb">type</span><span class="p">(</span><span class="n">true_classes</span><span class="p">)</span>
<span class="n">log_range</span> <span class="o">=</span> <span class="n">math</span><span class="o">.</span><span class="n">log</span><span class="p">(</span><span class="n">range_max</span> <span class="o">+</span> <span class="mi">1</span><span class="p">)</span>
<span class="n">rand</span> <span class="o">=</span> <span class="n">uniform</span><span class="p">(</span><span class="mi">0</span><span class="p">,</span> <span class="n">log_range</span><span class="p">,</span> <span class="n">shape</span><span class="o">=</span><span class="p">(</span><span class="n">num_sampled</span><span class="p">,),</span> <span class="n">dtype</span><span class="o">=</span><span class="s1">&#39;float64&#39;</span><span class="p">)</span>
<span class="c1"># make sure sampled_classes are in the range of [0, range_max)</span>
<span class="n">sampled_classes</span> <span class="o">=</span> <span class="p">(</span><span class="n">rand</span><span class="o">.</span><span class="n">exp</span><span class="p">()</span> <span class="o">-</span> <span class="mi">1</span><span class="p">)</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="s1">&#39;int64&#39;</span><span class="p">)</span> <span class="o">%</span> <span class="n">range_max</span>
<span class="n">true_classes</span> <span class="o">=</span> <span class="n">true_classes</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="s1">&#39;float64&#39;</span><span class="p">)</span>
<span class="n">expected_prob_true</span> <span class="o">=</span> <span class="p">((</span><span class="n">true_classes</span> <span class="o">+</span> <span class="mf">2.0</span><span class="p">)</span> <span class="o">/</span> <span class="p">(</span><span class="n">true_classes</span> <span class="o">+</span> <span class="mf">1.0</span><span class="p">))</span><span class="o">.</span><span class="n">log</span><span class="p">()</span> <span class="o">/</span> <span class="n">log_range</span>
<span class="n">expected_count_true</span> <span class="o">=</span> <span class="n">expected_prob_true</span> <span class="o">*</span> <span class="n">num_sampled</span>
<span class="c1"># cast sampled classes to fp64 to avoid integer division</span>
<span class="n">sampled_cls_fp64</span> <span class="o">=</span> <span class="n">sampled_classes</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="s1">&#39;float64&#39;</span><span class="p">)</span>
<span class="n">expected_prob_sampled</span> <span class="o">=</span> <span class="p">((</span><span class="n">sampled_cls_fp64</span> <span class="o">+</span> <span class="mf">2.0</span><span class="p">)</span> <span class="o">/</span> <span class="p">(</span><span class="n">sampled_cls_fp64</span> <span class="o">+</span> <span class="mf">1.0</span><span class="p">))</span><span class="o">.</span><span class="n">log</span><span class="p">()</span> <span class="o">/</span> <span class="n">log_range</span>
<span class="n">expected_count_sampled</span> <span class="o">=</span> <span class="n">expected_prob_sampled</span> <span class="o">*</span> <span class="n">num_sampled</span>
<span class="k">return</span> <span class="n">sampled_classes</span><span class="p">,</span> <span class="n">expected_count_true</span><span class="p">,</span> <span class="n">expected_count_sampled</span></div>
<span class="k">def</span> <span class="nf">_flatten</span><span class="p">(</span><span class="n">args</span><span class="p">,</span> <span class="n">inout_str</span><span class="p">):</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">args</span><span class="p">,</span> <span class="n">symbol</span><span class="o">.</span><span class="n">Symbol</span><span class="p">):</span>
<span class="n">length</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">args</span><span class="o">.</span><span class="n">list_outputs</span><span class="p">())</span>
<span class="n">length</span> <span class="o">=</span> <span class="n">length</span> <span class="k">if</span> <span class="n">length</span> <span class="o">&gt;</span> <span class="mi">1</span> <span class="k">else</span> <span class="mi">0</span>
<span class="k">return</span> <span class="p">[</span><span class="n">args</span><span class="p">],</span> <span class="nb">int</span><span class="p">(</span><span class="n">length</span><span class="p">)</span>
<span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">args</span><span class="p">,</span> <span class="p">(</span><span class="nb">list</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">)),</span> \
<span class="s2">&quot;</span><span class="si">%s</span><span class="s2"> must be (nested) list of Symbol, &quot;</span> \
<span class="s2">&quot;but got </span><span class="si">%s</span><span class="s2"> of type </span><span class="si">%s</span><span class="s2">&quot;</span><span class="o">%</span><span class="p">(</span><span class="n">inout_str</span><span class="p">,</span> <span class="nb">str</span><span class="p">(</span><span class="n">args</span><span class="p">),</span> <span class="nb">str</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">args</span><span class="p">)))</span>
<span class="n">flat</span> <span class="o">=</span> <span class="p">[]</span>
<span class="n">fmts</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="n">args</span><span class="p">:</span>
<span class="n">arg</span><span class="p">,</span> <span class="n">fmt</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">(</span><span class="n">i</span><span class="p">,</span> <span class="n">inout_str</span><span class="p">)</span>
<span class="n">flat</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span><span class="n">arg</span><span class="p">)</span>
<span class="n">fmts</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">fmt</span><span class="p">)</span>
<span class="k">return</span> <span class="n">flat</span><span class="p">,</span> <span class="n">fmts</span>
<span class="k">def</span> <span class="nf">_regroup</span><span class="p">(</span><span class="n">args</span><span class="p">,</span> <span class="n">fmt</span><span class="p">):</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">fmt</span><span class="p">,</span> <span class="nb">int</span><span class="p">):</span>
<span class="k">if</span> <span class="n">fmt</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">return</span> <span class="n">args</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">args</span><span class="p">[</span><span class="mi">1</span><span class="p">:]</span>
<span class="k">return</span> <span class="n">args</span><span class="p">[:</span><span class="n">fmt</span><span class="p">],</span> <span class="n">args</span><span class="p">[</span><span class="n">fmt</span><span class="p">:]</span>
<span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">args</span><span class="p">,</span> <span class="p">(</span><span class="nb">list</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">)),</span> \
<span class="s2">&quot;output must be (nested) list of Symbol, &quot;</span> \
<span class="s2">&quot;but got </span><span class="si">%s</span><span class="s2"> of type </span><span class="si">%s</span><span class="s2">&quot;</span><span class="o">%</span><span class="p">(</span><span class="nb">str</span><span class="p">(</span><span class="n">args</span><span class="p">),</span> <span class="nb">str</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">args</span><span class="p">)))</span>
<span class="n">ret</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="n">fmt</span><span class="p">:</span>
<span class="n">res</span><span class="p">,</span> <span class="n">args</span> <span class="o">=</span> <span class="n">_regroup</span><span class="p">(</span><span class="n">args</span><span class="p">,</span> <span class="n">i</span><span class="p">)</span>
<span class="n">ret</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">res</span><span class="p">)</span>
<span class="k">return</span> <span class="n">ret</span><span class="p">,</span> <span class="n">args</span>
<span class="c1"># We want to generate a unique name for input symbols to a control flow</span>
<span class="c1"># operator. The names are generated on purpose differently from the symbols</span>
<span class="c1"># cut from the graph.</span>
<span class="k">def</span> <span class="nf">_get_sym_uniq_name</span><span class="p">(</span><span class="n">sym</span><span class="p">):</span>
<span class="k">return</span> <span class="s1">&#39;</span><span class="si">{}</span><span class="s1">-</span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">sym</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="n">sym</span><span class="o">.</span><span class="n">attr</span><span class="p">(</span><span class="s1">&#39;_value_index&#39;</span><span class="p">))</span>
<span class="k">def</span> <span class="nf">_get_graph_inputs</span><span class="p">(</span><span class="n">subg</span><span class="p">):</span>
<span class="n">num_handles</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span>
<span class="n">handles</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">POINTER</span><span class="p">(</span><span class="n">SymbolHandle</span><span class="p">)()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXSymbolGetInputSymbols</span><span class="p">(</span><span class="n">subg</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">handles</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">num_handles</span><span class="p">)))</span>
<span class="n">syms</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">num_handles</span><span class="o">.</span><span class="n">value</span><span class="p">):</span>
<span class="n">s</span> <span class="o">=</span> <span class="n">Symbol</span><span class="p">(</span><span class="n">ctypes</span><span class="o">.</span><span class="n">cast</span><span class="p">(</span><span class="n">handles</span><span class="p">[</span><span class="n">i</span><span class="p">],</span> <span class="n">SymbolHandle</span><span class="p">))</span>
<span class="n">syms</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">s</span><span class="p">)</span>
<span class="k">return</span> <span class="n">syms</span>
<span class="k">def</span> <span class="nf">_cut_subgraph</span><span class="p">(</span><span class="n">subg</span><span class="p">):</span>
<span class="n">num_handles</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span>
<span class="n">handles</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">POINTER</span><span class="p">(</span><span class="n">SymbolHandle</span><span class="p">)()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXSymbolCutSubgraph</span><span class="p">(</span><span class="n">subg</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">handles</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">num_handles</span><span class="p">)))</span>
<span class="n">syms</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">num_handles</span><span class="o">.</span><span class="n">value</span><span class="p">):</span>
<span class="n">s</span> <span class="o">=</span> <span class="n">Symbol</span><span class="p">(</span><span class="n">ctypes</span><span class="o">.</span><span class="n">cast</span><span class="p">(</span><span class="n">handles</span><span class="p">[</span><span class="n">i</span><span class="p">],</span> <span class="n">SymbolHandle</span><span class="p">))</span>
<span class="n">syms</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">s</span><span class="p">)</span>
<span class="k">return</span> <span class="n">syms</span>
<span class="k">def</span> <span class="nf">_get_unique_subgraph_name</span><span class="p">(</span><span class="n">subgraph_name</span><span class="p">):</span>
<span class="n">attrs</span> <span class="o">=</span> <span class="n">AttrScope</span><span class="o">.</span><span class="n">_current</span><span class="o">.</span><span class="n">value</span><span class="o">.</span><span class="n">_attr</span>
<span class="k">if</span> <span class="n">attrs</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s2">&quot;__subgraph_name__&quot;</span><span class="p">,</span> <span class="s2">&quot;&quot;</span><span class="p">)</span> <span class="o">!=</span> <span class="s2">&quot;&quot;</span><span class="p">:</span>
<span class="n">subgraph_name</span> <span class="o">=</span> <span class="s2">&quot;&quot;</span><span class="o">.</span><span class="n">join</span><span class="p">([</span><span class="n">attrs</span><span class="p">[</span><span class="s2">&quot;__subgraph_name__&quot;</span><span class="p">],</span> <span class="s2">&quot;$&quot;</span><span class="p">,</span> <span class="n">subgraph_name</span><span class="p">])</span>
<span class="n">AttrScope</span><span class="o">.</span><span class="n">_subgraph_names</span><span class="p">[</span><span class="n">subgraph_name</span><span class="p">]</span> <span class="o">+=</span> <span class="mi">1</span>
<span class="n">subgraph_name</span> <span class="o">=</span> <span class="n">subgraph_name</span> <span class="o">+</span> <span class="nb">str</span><span class="p">(</span><span class="n">AttrScope</span><span class="o">.</span><span class="n">_subgraph_names</span><span class="p">[</span><span class="n">subgraph_name</span><span class="p">]</span> <span class="o">-</span> <span class="mi">1</span><span class="p">)</span>
<span class="k">return</span> <span class="n">subgraph_name</span>
<span class="c1"># This construct a subgraph for given output nodes.</span>
<span class="c1"># If an output node is one of the input nodes, we call identity to make sure</span>
<span class="c1"># that outputs nodes are different from input nodes.</span>
<span class="k">def</span> <span class="nf">_construct_subgraph</span><span class="p">(</span><span class="n">sym_out</span><span class="p">,</span> <span class="n">sym_states</span><span class="p">,</span> <span class="n">name</span><span class="p">):</span>
<span class="n">sym_out</span> <span class="o">=</span> <span class="n">_as_list</span><span class="p">(</span><span class="n">sym_out</span><span class="p">)</span>
<span class="n">sym_states</span> <span class="o">=</span> <span class="n">_as_list</span><span class="p">(</span><span class="n">sym_states</span><span class="p">)</span>
<span class="n">all_outputs</span> <span class="o">=</span> <span class="p">[]</span>
<span class="n">all_outputs</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span><span class="n">sym_out</span><span class="p">)</span>
<span class="n">all_outputs</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span><span class="n">sym_states</span><span class="p">)</span>
<span class="n">g</span> <span class="o">=</span> <span class="n">symbol</span><span class="o">.</span><span class="n">Group</span><span class="p">(</span><span class="n">all_outputs</span><span class="p">)</span>
<span class="n">flat_out</span> <span class="o">=</span> <span class="p">[]</span>
<span class="n">all_input_names</span> <span class="o">=</span> <span class="n">g</span><span class="o">.</span><span class="n">list_inputs</span><span class="p">()</span>
<span class="n">output_names</span> <span class="o">=</span> <span class="p">{</span><span class="n">o</span><span class="o">.</span><span class="n">name</span> <span class="k">for</span> <span class="n">o</span> <span class="ow">in</span> <span class="n">sym_out</span><span class="p">}</span>
<span class="k">for</span> <span class="n">o</span> <span class="ow">in</span> <span class="n">sym_out</span><span class="p">:</span>
<span class="k">if</span> <span class="n">o</span><span class="o">.</span><span class="n">name</span> <span class="ow">in</span> <span class="n">all_input_names</span> <span class="ow">or</span> <span class="n">o</span><span class="o">.</span><span class="n">list_attr</span><span class="p">()</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s2">&quot;__subgraph_name__&quot;</span><span class="p">,</span> <span class="s2">&quot;&quot;</span><span class="p">)</span> <span class="o">!=</span> <span class="n">name</span><span class="p">:</span>
<span class="n">flat_out</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">symbol</span><span class="o">.</span><span class="n">op</span><span class="o">.</span><span class="n">identity</span><span class="p">(</span><span class="n">o</span><span class="p">))</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">flat_out</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">o</span><span class="p">)</span>
<span class="k">for</span> <span class="n">s</span> <span class="ow">in</span> <span class="n">sym_states</span><span class="p">:</span>
<span class="k">if</span> <span class="n">s</span><span class="o">.</span><span class="n">name</span> <span class="ow">in</span> <span class="n">all_input_names</span> <span class="ow">or</span> <span class="n">s</span><span class="o">.</span><span class="n">name</span> <span class="ow">in</span> <span class="n">output_names</span> <span class="ow">or</span> \
<span class="n">s</span><span class="o">.</span><span class="n">list_attr</span><span class="p">()</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s2">&quot;__subgraph_name__&quot;</span><span class="p">,</span> <span class="s2">&quot;&quot;</span><span class="p">)</span> <span class="o">!=</span> <span class="n">name</span><span class="p">:</span>
<span class="n">flat_out</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">symbol</span><span class="o">.</span><span class="n">op</span><span class="o">.</span><span class="n">identity</span><span class="p">(</span><span class="n">s</span><span class="p">))</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">flat_out</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">s</span><span class="p">)</span>
<span class="k">return</span> <span class="n">symbol</span><span class="o">.</span><span class="n">Group</span><span class="p">(</span><span class="n">flat_out</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_check_data</span><span class="p">(</span><span class="n">inputs</span><span class="p">,</span> <span class="n">in_type</span><span class="p">,</span> <span class="n">msg</span><span class="p">):</span>
<span class="n">is_NDArray_or_list</span> <span class="o">=</span> <span class="kc">True</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">inputs</span><span class="p">,</span> <span class="nb">list</span><span class="p">):</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="n">inputs</span><span class="p">:</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">i</span><span class="p">,</span> <span class="n">in_type</span><span class="p">):</span>
<span class="n">is_NDArray_or_list</span> <span class="o">=</span> <span class="kc">False</span>
<span class="k">break</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">is_NDArray_or_list</span> <span class="o">=</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">inputs</span><span class="p">,</span> <span class="n">in_type</span><span class="p">)</span>
<span class="k">assert</span> <span class="n">is_NDArray_or_list</span><span class="p">,</span> <span class="n">msg</span>
<div class="viewcode-block" id="foreach"><a class="viewcode-back" href="../../../api/symbol/contrib/index.html#mxnet.symbol.contrib.foreach">[docs]</a><span class="k">def</span> <span class="nf">foreach</span><span class="p">(</span><span class="n">body</span><span class="p">,</span> <span class="n">data</span><span class="p">,</span> <span class="n">init_states</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s2">&quot;foreach&quot;</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Run a for loop with user-defined computation over Symbols on dimension 0.</span>
<span class="sd"> This operator simulates a for loop and body has the computation for an iteration</span>
<span class="sd"> of the for loop. It runs the computation in body on each slice from the input</span>
<span class="sd"> NDArrays.</span>
<span class="sd"> body takes two arguments as input and outputs a tuple of two elements,</span>
<span class="sd"> as illustrated below:</span>
<span class="sd"> out, states = body(data1, states)</span>
<span class="sd"> data1 can be either a symbol or a list of symbols. If data is a symbol,</span>
<span class="sd"> data1 is a symbol. Otherwise, data1 is a list of symbols and has the same</span>
<span class="sd"> size as data. states is a list of symbols and has the same size as init_states.</span>
<span class="sd"> Similarly, out can be either a symbol or a list of symbols, which are concatenated</span>
<span class="sd"> as the first output of foreach; states from the last execution of body</span>
<span class="sd"> are the second output of foreach.</span>
<span class="sd"> foreach can output only output data or states. If a user only wants states,</span>
<span class="sd"> the body function can return ([], states). Similarly, if a user only wants</span>
<span class="sd"> output data, the body function can return (out, []).</span>
<span class="sd"> The computation done by this operator is equivalent to the pseudo code below</span>
<span class="sd"> when the input data is NDArray::</span>
<span class="sd"> states = init_states</span>
<span class="sd"> outs = []</span>
<span class="sd"> for i in data.shape[0]:</span>
<span class="sd"> s = data[i]</span>
<span class="sd"> out, states = body(s, states)</span>
<span class="sd"> outs.append(out)</span>
<span class="sd"> outs = stack(*outs)</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> body : a Python function.</span>
<span class="sd"> Define computation in an iteration.</span>
<span class="sd"> data: a symbol or a list of symbols.</span>
<span class="sd"> The input data.</span>
<span class="sd"> init_states: a Symbol or nested lists of symbols.</span>
<span class="sd"> The initial values of the loop states.</span>
<span class="sd"> name: string.</span>
<span class="sd"> The name of the operator.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> outputs: a Symbol or nested lists of Symbols.</span>
<span class="sd"> The output data concatenated from the output of all iterations.</span>
<span class="sd"> states: a Symbol or nested lists of Symbols.</span>
<span class="sd"> The loop states in the last iteration.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; step = lambda data, states: (data + states[0], [states[0] * 2])</span>
<span class="sd"> &gt;&gt;&gt; data = mx.sym.var(&#39;data&#39;)</span>
<span class="sd"> &gt;&gt;&gt; states = [mx.sym.var(&#39;state&#39;)]</span>
<span class="sd"> &gt;&gt;&gt; outs, states = mx.sym.contrib.foreach(step, data, states)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">flatten_data</span><span class="p">,</span> <span class="n">data_fmt</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">(</span><span class="n">data</span><span class="p">,</span> <span class="s2">&quot;foreach input&quot;</span><span class="p">)</span>
<span class="n">_check_data</span><span class="p">(</span><span class="n">flatten_data</span><span class="p">,</span> <span class="n">symbol</span><span class="o">.</span><span class="n">Symbol</span><span class="p">,</span>
<span class="s2">&quot;data should be a symbol or a nested list of symbols&quot;</span><span class="p">)</span>
<span class="n">init_flatten_states</span><span class="p">,</span> <span class="n">init_state_fmt</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">(</span><span class="n">init_states</span><span class="p">,</span> <span class="s2">&quot;foreach states&quot;</span><span class="p">)</span>
<span class="n">_check_data</span><span class="p">(</span><span class="n">init_flatten_states</span><span class="p">,</span> <span class="n">symbol</span><span class="o">.</span><span class="n">Symbol</span><span class="p">,</span>
<span class="s2">&quot;init_states should be a symbol or a nested list of symbols&quot;</span><span class="p">)</span>
<span class="c1"># If the input python function references to the symbols outside</span>
<span class="c1"># the python function, we need to prune the computation graph constructed from</span>
<span class="c1"># the function. One way of doing it is to mark the nodes in the computation graph</span>
<span class="c1"># with AttrScope and prune the nodes without the special attribute.</span>
<span class="n">name</span> <span class="o">=</span> <span class="n">_get_unique_subgraph_name</span><span class="p">(</span><span class="n">name</span><span class="p">)</span>
<span class="k">with</span> <span class="n">AttrScope</span><span class="p">(</span><span class="n">__subgraph_name__</span><span class="o">=</span><span class="n">name</span><span class="p">):</span>
<span class="n">in_eles</span> <span class="o">=</span> <span class="p">[</span><span class="n">symbol</span><span class="o">.</span><span class="n">var</span><span class="p">(</span><span class="n">_get_sym_uniq_name</span><span class="p">(</span><span class="n">sym</span><span class="p">))</span> <span class="k">for</span> <span class="n">sym</span> <span class="ow">in</span> <span class="n">flatten_data</span><span class="p">]</span>
<span class="n">in_eles</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_regroup</span><span class="p">(</span><span class="n">in_eles</span><span class="p">,</span> <span class="n">data_fmt</span><span class="p">)</span>
<span class="n">states</span> <span class="o">=</span> <span class="p">[</span><span class="n">symbol</span><span class="o">.</span><span class="n">var</span><span class="p">(</span><span class="n">_get_sym_uniq_name</span><span class="p">(</span><span class="n">s</span><span class="p">))</span> <span class="k">for</span> <span class="n">s</span> <span class="ow">in</span> <span class="n">init_flatten_states</span><span class="p">]</span>
<span class="n">states</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_regroup</span><span class="p">(</span><span class="n">states</span><span class="p">,</span> <span class="n">copy</span><span class="o">.</span><span class="n">deepcopy</span><span class="p">(</span><span class="n">init_state_fmt</span><span class="p">))</span>
<span class="n">sym_out</span><span class="p">,</span> <span class="n">sym_states</span> <span class="o">=</span> <span class="n">body</span><span class="p">(</span><span class="n">in_eles</span><span class="p">,</span> <span class="n">states</span><span class="p">)</span>
<span class="n">sym_out</span><span class="p">,</span> <span class="n">out_fmt</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">(</span><span class="n">sym_out</span><span class="p">,</span> <span class="s2">&quot;foreach output&quot;</span><span class="p">)</span>
<span class="n">sym_states</span><span class="p">,</span> <span class="n">state_fmt</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">(</span><span class="n">sym_states</span><span class="p">,</span> <span class="s2">&quot;foreach loop_vars&quot;</span><span class="p">)</span>
<span class="k">assert</span> <span class="n">init_state_fmt</span> <span class="o">==</span> <span class="n">state_fmt</span><span class="p">,</span> <span class="s2">&quot;The input and output loop_vars have different format&quot;</span>
<span class="n">_check_data</span><span class="p">(</span><span class="n">sym_out</span><span class="p">,</span> <span class="n">symbol</span><span class="o">.</span><span class="n">Symbol</span><span class="p">,</span>
<span class="s2">&quot;the output should be an NDArray or a nested list of NDArrays&quot;</span><span class="p">)</span>
<span class="n">_check_data</span><span class="p">(</span><span class="n">sym_states</span><span class="p">,</span> <span class="n">symbol</span><span class="o">.</span><span class="n">Symbol</span><span class="p">,</span>
<span class="s2">&quot;the output states should be an NDArray or a nested list of NDArrays&quot;</span><span class="p">)</span>
<span class="n">num_out_data</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">sym_out</span><span class="p">)</span>
<span class="n">num_states</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">sym_states</span><span class="p">)</span>
<span class="n">num_outputs</span> <span class="o">=</span> <span class="n">num_out_data</span> <span class="o">+</span> <span class="n">num_states</span>
<span class="n">g</span> <span class="o">=</span> <span class="n">_construct_subgraph</span><span class="p">(</span><span class="n">sym_out</span><span class="p">,</span> <span class="n">sym_states</span><span class="p">,</span> <span class="n">name</span><span class="p">)</span>
<span class="n">input_syms</span> <span class="o">=</span> <span class="n">_get_graph_inputs</span><span class="p">(</span><span class="n">g</span><span class="p">)</span>
<span class="n">cut_syms</span> <span class="o">=</span> <span class="n">_cut_subgraph</span><span class="p">(</span><span class="n">g</span><span class="p">)</span>
<span class="n">input_syms</span> <span class="o">=</span> <span class="n">_get_graph_inputs</span><span class="p">(</span><span class="n">g</span><span class="p">)</span>
<span class="c1"># Here we need to find out how the input symbols are ordered as well as</span>
<span class="c1"># where the loop states are located in the list of inputs.</span>
<span class="c1"># This dict contains the symbols of the subgraph.</span>
<span class="n">input_syms</span> <span class="o">=</span> <span class="p">{</span><span class="n">sym</span><span class="o">.</span><span class="n">name</span><span class="p">:</span><span class="n">sym</span> <span class="k">for</span> <span class="n">sym</span> <span class="ow">in</span> <span class="n">input_syms</span><span class="p">}</span>
<span class="n">gin_names</span> <span class="o">=</span> <span class="n">input_syms</span><span class="o">.</span><span class="n">keys</span><span class="p">()</span>
<span class="c1"># This array contains the symbols for the inputs of foreach.</span>
<span class="c1"># They are ordered according to the inputs of the subgraph.</span>
<span class="n">state_names</span> <span class="o">=</span> <span class="p">[</span><span class="n">_get_sym_uniq_name</span><span class="p">(</span><span class="n">sym</span><span class="p">)</span> <span class="k">for</span> <span class="n">sym</span> <span class="ow">in</span> <span class="n">init_flatten_states</span><span class="p">]</span>
<span class="n">data_names</span> <span class="o">=</span> <span class="p">[</span><span class="n">_get_sym_uniq_name</span><span class="p">(</span><span class="n">sym</span><span class="p">)</span> <span class="k">for</span> <span class="n">sym</span> <span class="ow">in</span> <span class="n">flatten_data</span><span class="p">]</span>
<span class="n">cut_var_map</span> <span class="o">=</span> <span class="p">{</span><span class="n">sym</span><span class="o">.</span><span class="n">list_outputs</span><span class="p">()[</span><span class="mi">0</span><span class="p">]:</span><span class="n">sym</span> <span class="k">for</span> <span class="n">sym</span> <span class="ow">in</span> <span class="n">cut_syms</span><span class="p">}</span>
<span class="n">cut_var_names</span> <span class="o">=</span> <span class="n">cut_var_map</span><span class="o">.</span><span class="n">keys</span><span class="p">()</span>
<span class="n">subg_input_names</span> <span class="o">=</span> <span class="n">g</span><span class="o">.</span><span class="n">list_inputs</span><span class="p">()</span>
<span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="nb">set</span><span class="p">(</span><span class="n">subg_input_names</span><span class="p">))</span> <span class="o">==</span> <span class="nb">len</span><span class="p">(</span><span class="n">subg_input_names</span><span class="p">),</span> \
<span class="s2">&quot;The inputs of the subgraph don&#39;t have unique names: &quot;</span> <span class="o">+</span> <span class="nb">str</span><span class="p">(</span><span class="n">subg_input_names</span><span class="p">)</span>
<span class="c1"># ordered_ins contains input symbols in the following order:</span>
<span class="c1"># data_syms, state_syms, followed by cut_vars and vars in the closure.</span>
<span class="n">ordered_ins</span> <span class="o">=</span> <span class="p">[</span><span class="n">x</span> <span class="k">for</span> <span class="n">x</span> <span class="ow">in</span> <span class="n">flatten_data</span><span class="p">]</span>
<span class="c1"># this defines the location of data_syms in the list of subgraph inputs</span>
<span class="n">in_data_locs</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">dname</span> <span class="ow">in</span> <span class="n">data_names</span><span class="p">:</span>
<span class="c1"># Every data array must be used in the loop body (enforced below).</span>
<span class="k">if</span> <span class="n">dname</span> <span class="ow">in</span> <span class="n">subg_input_names</span><span class="p">:</span>
<span class="n">in_data_locs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">subg_input_names</span><span class="o">.</span><span class="n">index</span><span class="p">(</span><span class="n">dname</span><span class="p">))</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">AssertionError</span><span class="p">(</span><span class="s2">&quot;the data arrays have to be used in the loop body&quot;</span><span class="p">)</span>
<span class="n">ordered_ins</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span><span class="n">init_flatten_states</span><span class="p">)</span>
<span class="c1"># this defines the location of state_syms in the list of subgraph inputs.</span>
<span class="n">in_state_locs</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">sname</span> <span class="ow">in</span> <span class="n">state_names</span><span class="p">:</span>
<span class="c1"># Every state array must be used in the loop body (enforced below).</span>
<span class="k">if</span> <span class="n">sname</span> <span class="ow">in</span> <span class="n">subg_input_names</span><span class="p">:</span>
<span class="n">in_state_locs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">subg_input_names</span><span class="o">.</span><span class="n">index</span><span class="p">(</span><span class="n">sname</span><span class="p">))</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">AssertionError</span><span class="p">(</span><span class="s2">&quot;the state arrays have to be used in the loop body&quot;</span><span class="p">)</span>
<span class="n">remain_locs</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">in_name</span> <span class="ow">in</span> <span class="n">subg_input_names</span><span class="p">:</span>
<span class="k">assert</span> <span class="n">in_name</span> <span class="ow">in</span> <span class="n">gin_names</span><span class="p">,</span> <span class="s2">&quot;The input variable </span><span class="si">%s</span><span class="s2"> can&#39;t be found in graph inputs: </span><span class="si">%s</span><span class="s2">&quot;</span> \
<span class="o">%</span> <span class="p">(</span><span class="n">in_name</span><span class="p">,</span> <span class="nb">str</span><span class="p">(</span><span class="n">gin_names</span><span class="p">))</span>
<span class="k">if</span> <span class="n">in_name</span> <span class="ow">in</span> <span class="n">cut_var_names</span><span class="p">:</span>
<span class="n">ordered_ins</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">cut_var_map</span><span class="p">[</span><span class="n">in_name</span><span class="p">])</span>
<span class="n">remain_locs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">subg_input_names</span><span class="o">.</span><span class="n">index</span><span class="p">(</span><span class="n">in_name</span><span class="p">))</span>
<span class="k">elif</span> <span class="n">in_name</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">data_names</span> <span class="ow">and</span> <span class="n">in_name</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">state_names</span><span class="p">:</span>
<span class="c1"># The remaining inputs are the variable nodes created inside the UDF.</span>
<span class="c1"># The subgraph can&#39;t have nodes shared with the main graph. As such,</span>
<span class="c1"># we need to make a copy of these variable nodes.</span>
<span class="k">assert</span> <span class="n">in_name</span> <span class="ow">in</span> <span class="n">gin_names</span>
<span class="n">ordered_ins</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">copy</span><span class="o">.</span><span class="n">deepcopy</span><span class="p">(</span><span class="n">input_syms</span><span class="p">[</span><span class="n">in_name</span><span class="p">]))</span>
<span class="n">remain_locs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">subg_input_names</span><span class="o">.</span><span class="n">index</span><span class="p">(</span><span class="n">in_name</span><span class="p">))</span>
<span class="n">ret</span> <span class="o">=</span> <span class="n">symbol</span><span class="o">.</span><span class="n">_internal</span><span class="o">.</span><span class="n">_foreach</span><span class="p">(</span><span class="n">g</span><span class="p">,</span> <span class="o">*</span><span class="n">ordered_ins</span><span class="p">,</span> <span class="n">num_outputs</span><span class="o">=</span><span class="n">num_outputs</span><span class="p">,</span>
<span class="n">num_out_data</span><span class="o">=</span><span class="n">num_out_data</span><span class="p">,</span> <span class="n">in_state_locs</span><span class="o">=</span><span class="n">in_state_locs</span><span class="p">,</span>
<span class="n">in_data_locs</span><span class="o">=</span><span class="n">in_data_locs</span><span class="p">,</span> <span class="n">remain_locs</span><span class="o">=</span><span class="n">remain_locs</span><span class="p">)</span>
<span class="n">outs</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">num_outputs</span> <span class="o">-</span> <span class="n">num_states</span><span class="p">):</span>
<span class="n">outs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">ret</span><span class="p">[</span><span class="n">i</span><span class="p">])</span>
<span class="n">outs</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_regroup</span><span class="p">(</span><span class="n">outs</span><span class="p">,</span> <span class="n">out_fmt</span><span class="p">)</span>
<span class="n">states</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">num_states</span><span class="p">):</span>
<span class="n">states</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">ret</span><span class="p">[</span><span class="n">num_outputs</span> <span class="o">-</span> <span class="n">num_states</span> <span class="o">+</span> <span class="n">i</span><span class="p">])</span>
<span class="n">states</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_regroup</span><span class="p">(</span><span class="n">states</span><span class="p">,</span> <span class="n">state_fmt</span><span class="p">)</span>
<span class="k">return</span> <span class="p">(</span><span class="n">outs</span><span class="p">,</span> <span class="n">states</span><span class="p">)</span></div>
<div class="viewcode-block" id="while_loop"><a class="viewcode-back" href="../../../api/symbol/contrib/index.html#mxnet.symbol.contrib.while_loop">[docs]</a><span class="k">def</span> <span class="nf">while_loop</span><span class="p">(</span><span class="n">cond</span><span class="p">,</span> <span class="n">func</span><span class="p">,</span> <span class="n">loop_vars</span><span class="p">,</span> <span class="n">max_iterations</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s2">&quot;while_loop&quot;</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Run a while loop with user-defined computation and loop condition.</span>
<span class="sd"> This operator simulates a while loop which iteratively does customized computation</span>
<span class="sd"> as long as the condition is satisfied.</span>
<span class="sd"> `loop_vars` is a Symbol or nested lists of Symbols on which the computation uses.</span>
<span class="sd"> `cond` is a user-defined function, used as the loop condition.</span>
<span class="sd"> It consumes `loop_vars`, and produces a scalar MXNet symbol,</span>
<span class="sd"> indicating the termination of the loop.</span>
<span class="sd"> The loop ends when `cond` returns false (zero).</span>
<span class="sd"> The `cond` is variadic, and its signature should be</span>
<span class="sd"> `cond(*loop_vars) =&gt; Symbol`.</span>
<span class="sd"> `func` is a user-defined function, used as the loop body.</span>
<span class="sd"> It also consumes `loop_vars`, and produces `step_output` and `new_loop_vars` at each step.</span>
<span class="sd"> In each step, `step_output` should contain the same number of elements.</span>
<span class="sd"> Through all steps, the i-th element of `step_output` should have the same shape and dtype.</span>
<span class="sd"> Also, `new_loop_vars` should contain the same number of elements as `loop_vars`,</span>
<span class="sd"> and the corresponding element should have the same shape and dtype.</span>
<span class="sd"> The `func` is variadic, and its signature should be</span>
<span class="sd"> `func(*loop_vars) =&gt;</span>
<span class="sd"> (Symbol or nested List[Symbol] step_output, Symbol or nested List[Symbol] new_loop_vars)`.</span>
<span class="sd"> `max_iterations` is a scalar that defines the maximum number of iterations allowed.</span>
<span class="sd"> This function returns two lists.</span>
<span class="sd"> The first list has the length of `|step_output|`,</span>
<span class="sd"> in which the i-th element are all i-th elements of</span>
<span class="sd"> `step_output` from all steps, stacked along axis 0.</span>
<span class="sd"> The second list has the length of `|loop_vars|`,</span>
<span class="sd"> which represents final states of loop variables.</span>
<span class="sd"> .. warning::</span>
<span class="sd"> For now, the axis 0 of all Symbols in the first list are `max_iterations`,</span>
<span class="sd"> due to lack of dynamic shape inference.</span>
<span class="sd"> .. warning::</span>
<span class="sd"> Even if `cond` is never satisfied,</span>
<span class="sd"> while_loop returns a list of outputs with inferred dtype and shape.</span>
<span class="sd"> This is different from the NDArray version,</span>
<span class="sd"> where in this case `step_outputs` are assumed as an empty list.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> cond: a Python function.</span>
<span class="sd"> The loop condition.</span>
<span class="sd"> func: a Python function.</span>
<span class="sd"> The loop body.</span>
<span class="sd"> loop_vars: a Symbol or nested lists of Symbol.</span>
<span class="sd"> The initial values of the loop variables.</span>
<span class="sd"> max_iterations: a python int.</span>
<span class="sd"> Maximum number of iterations.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> outputs: a Symbol or nested lists of Symbols</span>
<span class="sd"> stacked output from each step</span>
<span class="sd"> states: a Symbol or nested lists of Symbols</span>
<span class="sd"> final state</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; cond = lambda i, s: i &lt;= 5</span>
<span class="sd"> &gt;&gt;&gt; func = lambda i, s: ([i + s], [i + 1, s + i])</span>
<span class="sd"> &gt;&gt;&gt; loop_vars = (mx.sym.var(&#39;i&#39;), mx.sym.var(&#39;s&#39;))</span>
<span class="sd"> &gt;&gt;&gt; outputs, states = mx.sym.contrib.while_loop(cond, func, loop_vars, max_iterations=10)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">def</span> <span class="nf">_to_python_scalar</span><span class="p">(</span><span class="n">inputs</span><span class="p">,</span> <span class="n">type_</span><span class="p">,</span> <span class="n">name</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Converts &quot;inputs&quot;, possibly typed mxnet NDArray, a numpy ndarray, other python types,</span>
<span class="sd"> to the given type</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="nb">hasattr</span><span class="p">(</span><span class="n">inputs</span><span class="p">,</span> <span class="s2">&quot;asscalar&quot;</span><span class="p">):</span>
<span class="n">inputs</span> <span class="o">=</span> <span class="n">inputs</span><span class="o">.</span><span class="n">asscalar</span><span class="p">()</span>
<span class="k">try</span><span class="p">:</span>
<span class="n">inputs</span> <span class="o">=</span> <span class="n">type_</span><span class="p">(</span><span class="n">inputs</span><span class="p">)</span>
<span class="k">except</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;Cannot convert </span><span class="si">%s</span><span class="s2"> to python </span><span class="si">%s</span><span class="s2">&quot;</span> <span class="o">%</span> <span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">type_</span><span class="o">.</span><span class="vm">__name__</span><span class="p">))</span>
<span class="k">return</span> <span class="n">inputs</span>
<span class="k">def</span> <span class="nf">_cond_wrapper</span><span class="p">(</span><span class="n">loop_vars</span><span class="p">):</span>
<span class="n">result</span> <span class="o">=</span> <span class="n">cond</span><span class="p">(</span><span class="o">*</span><span class="n">loop_vars</span><span class="p">)</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">result</span><span class="p">,</span> <span class="n">Symbol</span><span class="p">):</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;Return of cond must be a Symbol&quot;</span><span class="p">)</span>
<span class="k">return</span> <span class="p">[],</span> <span class="p">[</span><span class="n">result</span><span class="p">],</span> <span class="p">[],</span> <span class="p">[]</span>
<span class="k">def</span> <span class="nf">_func_wrapper</span><span class="p">(</span><span class="n">loop_vars</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;This wrapper unifies</span>
<span class="sd"> &quot;func: loop_vars -&gt; new_loop_vars&quot;</span>
<span class="sd"> and &quot;func: loop_vars -&gt; (step_output, new_loop_vars)&quot;</span>
<span class="sd"> into &quot;func: loop_vars -&gt; (list of step_outputs, tuple of new_loop_vars)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">step_output</span><span class="p">,</span> <span class="n">new_loop_vars</span> <span class="o">=</span> <span class="n">func</span><span class="p">(</span><span class="o">*</span><span class="n">loop_vars</span><span class="p">)</span>
<span class="k">if</span> <span class="n">step_output</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">step_output</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">if</span> <span class="n">new_loop_vars</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">new_loop_vars</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">step_output</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">):</span>
<span class="n">step_output</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">step_output</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">new_loop_vars</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">):</span>
<span class="n">new_loop_vars</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">new_loop_vars</span><span class="p">)</span>
<span class="n">step_output</span><span class="p">,</span> <span class="n">out_fmt</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">(</span><span class="n">step_output</span><span class="p">,</span> <span class="s2">&quot;while output&quot;</span><span class="p">)</span>
<span class="n">new_loop_vars</span><span class="p">,</span> <span class="n">var_fmt</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">(</span><span class="n">new_loop_vars</span><span class="p">,</span> <span class="s2">&quot;while loop_vars&quot;</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">loop_vars</span><span class="p">)</span> <span class="o">!=</span> <span class="nb">len</span><span class="p">(</span><span class="n">new_loop_vars</span><span class="p">):</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;The number of loop_vars should be consistent during the loop&quot;</span><span class="p">)</span>
<span class="k">return</span> <span class="n">step_output</span><span class="p">,</span> <span class="n">new_loop_vars</span><span class="p">,</span> <span class="n">out_fmt</span><span class="p">,</span> <span class="n">var_fmt</span>
<span class="k">def</span> <span class="nf">_create_subgraph</span><span class="p">(</span><span class="n">graph_vars</span><span class="p">,</span> <span class="n">graph_func</span><span class="p">,</span> <span class="n">subgraph_name</span><span class="p">):</span>
<span class="n">subgraph_name</span> <span class="o">=</span> <span class="n">_get_unique_subgraph_name</span><span class="p">(</span><span class="n">subgraph_name</span><span class="p">)</span>
<span class="k">with</span> <span class="n">AttrScope</span><span class="p">(</span><span class="n">__subgraph_name__</span><span class="o">=</span><span class="n">subgraph_name</span><span class="p">):</span>
<span class="c1"># create new variables with the same name,</span>
<span class="c1"># then feed them to the given func</span>
<span class="n">graph_vars</span><span class="p">,</span> <span class="n">var_fmt</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">(</span><span class="n">graph_vars</span><span class="p">,</span> <span class="s2">&quot;while loop_vars&quot;</span><span class="p">)</span>
<span class="n">new_graph_vars</span> <span class="o">=</span> <span class="p">[</span><span class="n">symbol</span><span class="o">.</span><span class="n">var</span><span class="p">(</span><span class="n">_get_sym_uniq_name</span><span class="p">(</span><span class="n">sym</span><span class="p">))</span> <span class="k">for</span> <span class="n">sym</span> <span class="ow">in</span> <span class="n">graph_vars</span><span class="p">]</span>
<span class="n">new_graph_vars</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_regroup</span><span class="p">(</span><span class="n">new_graph_vars</span><span class="p">,</span> <span class="n">var_fmt</span><span class="p">)</span>
<span class="n">outputs</span><span class="p">,</span> <span class="n">final_state</span><span class="p">,</span> <span class="n">out_fmt</span><span class="p">,</span> <span class="n">var_fmt</span> <span class="o">=</span> <span class="n">graph_func</span><span class="p">(</span><span class="n">new_graph_vars</span><span class="p">)</span>
<span class="c1"># first `num_out_data` elements belong to `outputs`</span>
<span class="c1"># other elements belong to `final_state`</span>
<span class="n">num_out_data</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">outputs</span><span class="p">)</span>
<span class="n">num_outputs</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">outputs</span><span class="p">)</span> <span class="o">+</span> <span class="nb">len</span><span class="p">(</span><span class="n">final_state</span><span class="p">)</span>
<span class="c1"># nnvm cut-graph does not allow inputs and outputs overlap</span>
<span class="c1"># so we calculate the name of inputs, and copy outputs once it overlaps with inputs</span>
<span class="c1"># group all outputs of graph_func</span>
<span class="n">all_input_names</span> <span class="o">=</span> <span class="n">symbol</span><span class="o">.</span><span class="n">Group</span><span class="p">(</span><span class="n">outputs</span> <span class="o">+</span> <span class="n">final_state</span><span class="p">)</span><span class="o">.</span><span class="n">list_inputs</span><span class="p">()</span>
<span class="n">in_input</span> <span class="o">=</span> <span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="n">x</span><span class="o">.</span><span class="n">name</span> <span class="ow">in</span> <span class="n">all_input_names</span>
<span class="n">in_graph</span> <span class="o">=</span> <span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="n">x</span><span class="o">.</span><span class="n">list_attr</span><span class="p">()</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s2">&quot;__subgraph_name__&quot;</span><span class="p">,</span> <span class="s2">&quot;&quot;</span><span class="p">)</span> <span class="o">==</span> <span class="n">subgraph_name</span>
<span class="n">make_identity</span> <span class="o">=</span> <span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="n">symbol</span><span class="o">.</span><span class="n">op</span><span class="o">.</span><span class="n">identity</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="k">if</span> <span class="n">in_input</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="ow">or</span> <span class="ow">not</span> <span class="n">in_graph</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> \
<span class="k">else</span> <span class="n">x</span>
<span class="n">graph</span> <span class="o">=</span> <span class="n">symbol</span><span class="o">.</span><span class="n">Group</span><span class="p">(</span><span class="nb">list</span><span class="p">(</span><span class="nb">map</span><span class="p">(</span><span class="n">make_identity</span><span class="p">,</span> <span class="n">outputs</span> <span class="o">+</span> <span class="n">final_state</span><span class="p">)))</span>
<span class="k">return</span> <span class="n">graph</span><span class="p">,</span> <span class="n">num_out_data</span><span class="p">,</span> <span class="n">num_outputs</span><span class="p">,</span> <span class="n">out_fmt</span><span class="p">,</span> <span class="n">var_fmt</span>
<span class="n">flatten_loop_vars</span><span class="p">,</span> <span class="n">init_loop_var_fmt</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">(</span><span class="n">loop_vars</span><span class="p">,</span> <span class="s2">&quot;while loop_vars&quot;</span><span class="p">)</span>
<span class="n">_check_data</span><span class="p">(</span><span class="n">flatten_loop_vars</span><span class="p">,</span> <span class="n">symbol</span><span class="o">.</span><span class="n">Symbol</span><span class="p">,</span>
<span class="s2">&quot;loop_vars should be a symbol or a nested list of symbols&quot;</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_union_inputs</span><span class="p">(</span><span class="o">*</span><span class="n">graphs</span><span class="p">):</span>
<span class="c1"># Given a list of graphs, each whose inputs are either from loop_vars or other variables.</span>
<span class="c1"># 1) calculate a list `inputs`, the union of their inputs.</span>
<span class="c1"># 2) for each graph, determine in which indices their inputs reside in `inputs`</span>
<span class="c1"># 3) for each variable in the input of `graph`, find which index it is</span>
<span class="n">inputs</span> <span class="o">=</span> <span class="p">[]</span> <span class="c1"># List[Symbol], result of 1)</span>
<span class="n">locs</span> <span class="o">=</span> <span class="p">[]</span> <span class="c1"># List[Tuple(List[Int], List[Int])], a list of tuples,</span>
<span class="c1"># where tuples are results of 2) and 3)</span>
<span class="n">input_id_to_loc</span> <span class="o">=</span> <span class="p">{}</span> <span class="c1"># Dict[int, int], given id(sym), input_id_to_loc maps it</span>
<span class="c1"># to a `loc`, where inputs[loc] = sym</span>
<span class="k">for</span> <span class="n">graph</span> <span class="ow">in</span> <span class="n">graphs</span><span class="p">:</span>
<span class="c1"># some loop_vars are inputs to `graph`, some are not</span>
<span class="n">name_to_loop_vars</span> <span class="o">=</span> <span class="p">{</span><span class="n">_get_sym_uniq_name</span><span class="p">(</span><span class="n">sym</span><span class="p">):</span> <span class="n">sym</span> <span class="k">for</span> <span class="n">sym</span> <span class="ow">in</span> <span class="n">flatten_loop_vars</span><span class="p">}</span>
<span class="c1"># other inputs to `graph` created by cut_graph</span>
<span class="n">name_to_cut_g_syms</span> <span class="o">=</span> <span class="p">{</span><span class="n">sym</span><span class="o">.</span><span class="n">list_outputs</span><span class="p">()[</span><span class="mi">0</span><span class="p">]:</span> <span class="n">sym</span> <span class="k">for</span> <span class="n">sym</span> <span class="ow">in</span> <span class="n">_cut_subgraph</span><span class="p">(</span><span class="n">graph</span><span class="p">)}</span>
<span class="c1"># input_syms: all inputs to the `graph`</span>
<span class="n">name_to_input_syms</span> <span class="o">=</span> <span class="p">{</span><span class="n">sym</span><span class="o">.</span><span class="n">name</span><span class="p">:</span> <span class="n">sym</span> <span class="k">for</span> <span class="n">sym</span> <span class="ow">in</span> <span class="n">_get_graph_inputs</span><span class="p">(</span><span class="n">graph</span><span class="p">)}</span>
<span class="c1"># also we collect the mapping from var&#39;s name to var&#39;s loc in loop_vars</span>
<span class="n">name_to_var_locs</span> <span class="o">=</span> <span class="p">{</span><span class="n">_get_sym_uniq_name</span><span class="p">(</span><span class="n">sym</span><span class="p">):</span> <span class="n">i</span> <span class="k">for</span> <span class="n">i</span><span class="p">,</span> <span class="n">sym</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">flatten_loop_vars</span><span class="p">)}</span>
<span class="c1"># collect arguments for each subgraph</span>
<span class="n">input_locs</span> <span class="o">=</span> <span class="p">[]</span> <span class="c1"># results from the second step</span>
<span class="n">var_locs</span> <span class="o">=</span> <span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">]</span> <span class="o">*</span> <span class="nb">len</span><span class="p">(</span><span class="n">flatten_loop_vars</span><span class="p">)</span> <span class="c1"># results from the third step</span>
<span class="n">subg_input_names</span> <span class="o">=</span> <span class="n">graph</span><span class="o">.</span><span class="n">list_inputs</span><span class="p">()</span>
<span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="nb">set</span><span class="p">(</span><span class="n">subg_input_names</span><span class="p">))</span> <span class="o">==</span> <span class="nb">len</span><span class="p">(</span><span class="n">subg_input_names</span><span class="p">),</span> \
<span class="s2">&quot;The inputs of the subgraph don&#39;t have unique names: &quot;</span> <span class="o">+</span> <span class="nb">str</span><span class="p">(</span><span class="n">subg_input_names</span><span class="p">)</span>
<span class="k">for</span> <span class="n">name</span> <span class="ow">in</span> <span class="n">subg_input_names</span><span class="p">:</span>
<span class="k">assert</span> <span class="n">name</span> <span class="ow">in</span> <span class="n">name_to_input_syms</span> <span class="c1"># it should obviously hold</span>
<span class="c1"># name -&gt; sym</span>
<span class="k">if</span> <span class="n">name</span> <span class="ow">in</span> <span class="n">name_to_loop_vars</span><span class="p">:</span>
<span class="n">sym</span> <span class="o">=</span> <span class="n">name_to_loop_vars</span><span class="p">[</span><span class="n">name</span><span class="p">]</span>
<span class="k">elif</span> <span class="n">name</span> <span class="ow">in</span> <span class="n">name_to_cut_g_syms</span><span class="p">:</span>
<span class="n">sym</span> <span class="o">=</span> <span class="n">name_to_cut_g_syms</span><span class="p">[</span><span class="n">name</span><span class="p">]</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">sym</span> <span class="o">=</span> <span class="n">copy</span><span class="o">.</span><span class="n">deepcopy</span><span class="p">(</span><span class="n">name_to_input_syms</span><span class="p">[</span><span class="n">name</span><span class="p">])</span>
<span class="c1"># do 2), and 1) is implicitly done</span>
<span class="k">if</span> <span class="nb">id</span><span class="p">(</span><span class="n">sym</span><span class="p">)</span> <span class="ow">in</span> <span class="n">input_id_to_loc</span><span class="p">:</span>
<span class="n">loc</span> <span class="o">=</span> <span class="n">input_id_to_loc</span><span class="p">[</span><span class="nb">id</span><span class="p">(</span><span class="n">sym</span><span class="p">)]</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">loc</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">input_id_to_loc</span><span class="p">)</span>
<span class="n">inputs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">sym</span><span class="p">)</span>
<span class="n">input_id_to_loc</span><span class="p">[</span><span class="nb">id</span><span class="p">(</span><span class="n">sym</span><span class="p">)]</span> <span class="o">=</span> <span class="n">loc</span>
<span class="n">input_locs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">loc</span><span class="p">)</span>
<span class="c1"># do 3)</span>
<span class="k">if</span> <span class="n">name</span> <span class="ow">in</span> <span class="n">name_to_var_locs</span><span class="p">:</span>
<span class="n">var_locs</span><span class="p">[</span><span class="n">name_to_var_locs</span><span class="p">[</span><span class="n">name</span><span class="p">]]</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">input_locs</span><span class="p">)</span> <span class="o">-</span> <span class="mi">1</span>
<span class="n">locs</span><span class="o">.</span><span class="n">append</span><span class="p">((</span><span class="n">input_locs</span><span class="p">,</span> <span class="n">var_locs</span><span class="p">))</span>
<span class="k">return</span> <span class="n">inputs</span><span class="p">,</span> <span class="n">locs</span>
<span class="k">if</span> <span class="n">max_iterations</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;max_iterations should be specified&quot;</span><span class="p">)</span>
<span class="n">max_iterations</span> <span class="o">=</span> <span class="n">_to_python_scalar</span><span class="p">(</span><span class="n">max_iterations</span><span class="p">,</span> <span class="nb">int</span><span class="p">,</span> <span class="s2">&quot;max_iteration&quot;</span><span class="p">)</span>
<span class="c1"># It should be work as fine if loop_vars are empty I guess,</span>
<span class="c1"># but it is semantically unnecessary to include this case.</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">loop_vars</span><span class="p">)</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;loop_vars should contain at least one element&quot;</span><span class="p">)</span>
<span class="c1"># create graph for `cond&#39;</span>
<span class="n">cond_g</span><span class="p">,</span> <span class="n">num_out_data</span><span class="p">,</span> <span class="n">num_outputs</span><span class="p">,</span> <span class="n">_</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> \
<span class="n">_create_subgraph</span><span class="p">(</span><span class="n">loop_vars</span><span class="p">,</span> <span class="n">_cond_wrapper</span><span class="p">,</span> <span class="n">name</span> <span class="o">+</span> <span class="s2">&quot;_cond&quot;</span><span class="p">)</span>
<span class="k">assert</span> <span class="n">num_out_data</span> <span class="o">==</span> <span class="mi">0</span>
<span class="k">assert</span> <span class="n">num_outputs</span> <span class="o">==</span> <span class="mi">1</span>
<span class="c1"># create graph for `func`</span>
<span class="n">func_g</span><span class="p">,</span> <span class="n">num_out_data</span><span class="p">,</span> <span class="n">num_outputs</span><span class="p">,</span> <span class="n">out_fmt</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> \
<span class="n">_create_subgraph</span><span class="p">(</span><span class="n">loop_vars</span><span class="p">,</span> <span class="n">_func_wrapper</span><span class="p">,</span> <span class="n">name</span> <span class="o">+</span> <span class="s2">&quot;_func&quot;</span><span class="p">)</span>
<span class="c1"># find symbols used in either cond_g or func_g</span>
<span class="n">input_syms</span><span class="p">,</span> <span class="p">((</span><span class="n">cond_input_locs</span><span class="p">,</span> <span class="n">_</span><span class="p">),</span> <span class="p">(</span><span class="n">func_input_locs</span><span class="p">,</span> <span class="n">func_var_locs</span><span class="p">))</span> <span class="o">=</span> \
<span class="n">_union_inputs</span><span class="p">(</span><span class="n">cond_g</span><span class="p">,</span> <span class="n">func_g</span><span class="p">)</span>
<span class="k">for</span> <span class="n">i_th</span><span class="p">,</span> <span class="n">loc</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">func_var_locs</span><span class="p">,</span> <span class="mi">1</span><span class="p">):</span>
<span class="k">if</span> <span class="n">loc</span> <span class="o">==</span> <span class="o">-</span><span class="mi">1</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;The </span><span class="si">%d</span><span class="s2">-th loop_var doesn&#39;t involve into the computation&quot;</span> <span class="o">%</span> <span class="n">i_th</span><span class="p">)</span>
<span class="n">result</span> <span class="o">=</span> <span class="n">symbol</span><span class="o">.</span><span class="n">_internal</span><span class="o">.</span><span class="n">_while_loop</span><span class="p">(</span>
<span class="n">cond_g</span><span class="p">,</span>
<span class="n">func_g</span><span class="p">,</span>
<span class="o">*</span><span class="n">input_syms</span><span class="p">,</span>
<span class="n">max_iterations</span><span class="o">=</span><span class="n">max_iterations</span><span class="p">,</span>
<span class="n">cond_input_locs</span><span class="o">=</span><span class="n">cond_input_locs</span><span class="p">,</span>
<span class="n">func_input_locs</span><span class="o">=</span><span class="n">func_input_locs</span><span class="p">,</span>
<span class="n">func_var_locs</span><span class="o">=</span><span class="n">func_var_locs</span><span class="p">,</span>
<span class="n">num_out_data</span><span class="o">=</span><span class="n">num_out_data</span><span class="p">,</span>
<span class="n">num_outputs</span><span class="o">=</span><span class="n">num_outputs</span>
<span class="p">)</span>
<span class="n">outputs</span> <span class="o">=</span> <span class="p">[</span><span class="n">result</span><span class="p">[</span><span class="n">i</span><span class="p">]</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">num_out_data</span><span class="p">)]</span>
<span class="n">outputs</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_regroup</span><span class="p">(</span><span class="n">outputs</span><span class="p">,</span> <span class="n">out_fmt</span><span class="p">)</span>
<span class="n">final_loop_vars</span> <span class="o">=</span> <span class="p">[</span><span class="n">result</span><span class="p">[</span><span class="n">i</span><span class="p">]</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">num_out_data</span><span class="p">,</span> <span class="n">num_outputs</span><span class="p">)]</span>
<span class="n">final_loop_vars</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_regroup</span><span class="p">(</span><span class="n">final_loop_vars</span><span class="p">,</span> <span class="n">init_loop_var_fmt</span><span class="p">)</span>
<span class="k">return</span> <span class="n">outputs</span><span class="p">,</span> <span class="n">final_loop_vars</span></div>
<div class="viewcode-block" id="cond"><a class="viewcode-back" href="../../../api/symbol/contrib/index.html#mxnet.symbol.contrib.cond">[docs]</a><span class="k">def</span> <span class="nf">cond</span><span class="p">(</span><span class="n">pred</span><span class="p">,</span> <span class="n">then_func</span><span class="p">,</span> <span class="n">else_func</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s2">&quot;cond&quot;</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Run an if-then-else using user-defined condition and computation</span>
<span class="sd"> This operator simulates a if-like branch which chooses to do one of</span>
<span class="sd"> the two customized computations according to the specified condition.</span>
<span class="sd"> `pred` is a scalar MXNet Symbol,</span>
<span class="sd"> indicating which branch of computation should be used.</span>
<span class="sd"> `then_func` is a user-defined function, used as computation of the then branch.</span>
<span class="sd"> It produces `outputs`, which is a list of Symbols.</span>
<span class="sd"> The signature of `then_func` should be</span>
<span class="sd"> `then_func() =&gt; nested List[Symbol]`.</span>
<span class="sd"> `else_func` is a user-defined function, used as computation of the else branch.</span>
<span class="sd"> It produces `outputs`, which is a list of Symbols.</span>
<span class="sd"> The signature of `else_func` should be</span>
<span class="sd"> `else_func() =&gt; nested List[Symbol]`.</span>
<span class="sd"> The `outputs` produces by `then_func` and `else_func` should have the same number</span>
<span class="sd"> of elements, all of which should be in the same shape, of the same dtype and stype.</span>
<span class="sd"> This function returns a list of symbols, representing the computation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> pred: a MXNet Symbol representing a scalar.</span>
<span class="sd"> The branch condition.</span>
<span class="sd"> then_func: a Python function.</span>
<span class="sd"> The computation to be executed if `pred` is true.</span>
<span class="sd"> else_func: a Python function.</span>
<span class="sd"> The computation to be executed if `pred` is false.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> outputs: a Symbol or nested lists of Symbols, representing the result of computation.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; a, b = mx.sym.var(&#39;a&#39;), mx.sym.var(&#39;b&#39;)</span>
<span class="sd"> &gt;&gt;&gt; pred = a * b &lt; 5</span>
<span class="sd"> &gt;&gt;&gt; then_func = lambda: (a + 5) * (b + 5)</span>
<span class="sd"> &gt;&gt;&gt; else_func = lambda: (a - 5) * (b - 5)</span>
<span class="sd"> &gt;&gt;&gt; outputs = mx.sym.contrib.cond(pred, then_func, else_func)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">def</span> <span class="nf">_create_subgraph</span><span class="p">(</span><span class="n">graph_vars</span><span class="p">,</span> <span class="n">graph_func</span><span class="p">,</span> <span class="n">subgraph_name</span><span class="p">):</span>
<span class="n">subgraph_name</span> <span class="o">=</span> <span class="n">_get_unique_subgraph_name</span><span class="p">(</span><span class="n">subgraph_name</span><span class="p">)</span>
<span class="k">with</span> <span class="n">AttrScope</span><span class="p">(</span><span class="n">__subgraph_name__</span><span class="o">=</span><span class="n">subgraph_name</span><span class="p">):</span>
<span class="c1"># create new variables with the same name,</span>
<span class="c1"># them feed them to the given func</span>
<span class="n">new_graph_vars</span> <span class="o">=</span> <span class="p">[</span><span class="n">symbol</span><span class="o">.</span><span class="n">var</span><span class="p">(</span><span class="n">sym</span><span class="o">.</span><span class="n">name</span><span class="p">)</span> <span class="k">for</span> <span class="n">sym</span> <span class="ow">in</span> <span class="n">graph_vars</span><span class="p">]</span>
<span class="n">outputs</span> <span class="o">=</span> <span class="n">graph_func</span><span class="p">(</span><span class="o">*</span><span class="n">new_graph_vars</span><span class="p">)</span>
<span class="n">outputs</span><span class="p">,</span> <span class="n">out_fmt</span> <span class="o">=</span> <span class="n">_flatten</span><span class="p">(</span><span class="n">outputs</span><span class="p">,</span> <span class="s2">&quot;cond outputs&quot;</span><span class="p">)</span>
<span class="n">num_outputs</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">outputs</span><span class="p">)</span>
<span class="c1"># nnvm cut-graph does not allow inputs and outputs overlap</span>
<span class="c1"># so we calculate the name of inputs, and copy outputs once it overlaps with inputs</span>
<span class="c1"># group all outputs of graph_func</span>
<span class="n">all_input_names</span> <span class="o">=</span> <span class="n">symbol</span><span class="o">.</span><span class="n">Group</span><span class="p">(</span><span class="n">outputs</span><span class="p">)</span><span class="o">.</span><span class="n">list_inputs</span><span class="p">()</span>
<span class="n">in_input</span> <span class="o">=</span> <span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="n">x</span><span class="o">.</span><span class="n">name</span> <span class="ow">in</span> <span class="n">all_input_names</span>
<span class="n">in_graph</span> <span class="o">=</span> <span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="n">x</span><span class="o">.</span><span class="n">list_attr</span><span class="p">()</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s2">&quot;__subgraph_name__&quot;</span><span class="p">,</span> <span class="s2">&quot;&quot;</span><span class="p">)</span> <span class="o">==</span> <span class="n">subgraph_name</span>
<span class="n">make_identity</span> <span class="o">=</span> <span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="n">symbol</span><span class="o">.</span><span class="n">op</span><span class="o">.</span><span class="n">identity</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="k">if</span> <span class="n">in_input</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="ow">or</span> <span class="ow">not</span> <span class="n">in_graph</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> \
<span class="k">else</span> <span class="n">x</span>
<span class="n">graph</span> <span class="o">=</span> <span class="n">symbol</span><span class="o">.</span><span class="n">Group</span><span class="p">(</span><span class="nb">list</span><span class="p">(</span><span class="nb">map</span><span class="p">(</span><span class="n">make_identity</span><span class="p">,</span> <span class="n">outputs</span><span class="p">)))</span>
<span class="k">return</span> <span class="n">graph</span><span class="p">,</span> <span class="n">num_outputs</span><span class="p">,</span> <span class="n">out_fmt</span>
<span class="k">def</span> <span class="nf">_union_inputs</span><span class="p">(</span><span class="o">*</span><span class="n">graphs</span><span class="p">):</span>
<span class="c1"># Given a list of graphs, each whose inputs are either from input_vars or other variables.</span>
<span class="c1"># 1) calculate a list `inputs`, the union of their inputs.</span>
<span class="c1"># 2) for each graph, determine in which indices their inputs reside in `inputs`</span>
<span class="c1"># 3) for each variable in the input of `graph`, find which index it is</span>
<span class="n">inputs</span> <span class="o">=</span> <span class="p">[]</span> <span class="c1"># List[Symbol], result of 1)</span>
<span class="n">locs</span> <span class="o">=</span> <span class="p">[]</span> <span class="c1"># List[Tuple(List[Int], List[Int])], a list of tuples,</span>
<span class="c1"># where tuples are results of 2) and 3)</span>
<span class="n">input_id_to_loc</span> <span class="o">=</span> <span class="p">{}</span> <span class="c1"># Dict[int, int], given id(sym), input_id_to_loc maps it</span>
<span class="c1"># to a `loc`, where inputs[loc] = sym</span>
<span class="k">for</span> <span class="n">graph</span> <span class="ow">in</span> <span class="n">graphs</span><span class="p">:</span>
<span class="c1"># some input_vars are inputs to `graph`, some are not</span>
<span class="n">name_to_input_vars</span> <span class="o">=</span> <span class="p">{</span><span class="n">sym</span><span class="o">.</span><span class="n">name</span><span class="p">:</span> <span class="n">sym</span> <span class="k">for</span> <span class="n">sym</span> <span class="ow">in</span> <span class="n">inputs</span><span class="p">}</span>
<span class="c1"># other inputs to `graph` created by cut_graph</span>
<span class="n">name_to_cut_g_syms</span> <span class="o">=</span> <span class="p">{</span><span class="n">sym</span><span class="o">.</span><span class="n">list_outputs</span><span class="p">()[</span><span class="mi">0</span><span class="p">]:</span> <span class="n">sym</span> <span class="k">for</span> <span class="n">sym</span> <span class="ow">in</span> <span class="n">_cut_subgraph</span><span class="p">(</span><span class="n">graph</span><span class="p">)}</span>
<span class="c1"># input_syms: all inputs to the `graph`</span>
<span class="n">name_to_input_syms</span> <span class="o">=</span> <span class="p">{</span><span class="n">sym</span><span class="o">.</span><span class="n">name</span><span class="p">:</span> <span class="n">sym</span> <span class="k">for</span> <span class="n">sym</span> <span class="ow">in</span> <span class="n">_get_graph_inputs</span><span class="p">(</span><span class="n">graph</span><span class="p">)}</span>
<span class="c1"># collect arguments for each subgraph</span>
<span class="n">input_locs</span> <span class="o">=</span> <span class="p">[]</span> <span class="c1"># results from the second step</span>
<span class="k">for</span> <span class="n">name</span> <span class="ow">in</span> <span class="n">graph</span><span class="o">.</span><span class="n">list_inputs</span><span class="p">():</span>
<span class="k">assert</span> <span class="n">name</span> <span class="ow">in</span> <span class="n">name_to_input_syms</span> <span class="c1"># it should obviously hold</span>
<span class="c1"># name -&gt; sym</span>
<span class="k">if</span> <span class="n">name</span> <span class="ow">in</span> <span class="n">name_to_input_vars</span><span class="p">:</span>
<span class="n">sym</span> <span class="o">=</span> <span class="n">name_to_input_vars</span><span class="p">[</span><span class="n">name</span><span class="p">]</span>
<span class="k">elif</span> <span class="n">name</span> <span class="ow">in</span> <span class="n">name_to_cut_g_syms</span><span class="p">:</span>
<span class="n">sym</span> <span class="o">=</span> <span class="n">name_to_cut_g_syms</span><span class="p">[</span><span class="n">name</span><span class="p">]</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">sym</span> <span class="o">=</span> <span class="n">copy</span><span class="o">.</span><span class="n">deepcopy</span><span class="p">(</span><span class="n">name_to_input_syms</span><span class="p">[</span><span class="n">name</span><span class="p">])</span>
<span class="c1"># do 2), and 1) is implicitly done</span>
<span class="k">if</span> <span class="nb">id</span><span class="p">(</span><span class="n">sym</span><span class="p">)</span> <span class="ow">in</span> <span class="n">input_id_to_loc</span><span class="p">:</span>
<span class="n">loc</span> <span class="o">=</span> <span class="n">input_id_to_loc</span><span class="p">[</span><span class="nb">id</span><span class="p">(</span><span class="n">sym</span><span class="p">)]</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">loc</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">input_id_to_loc</span><span class="p">)</span>
<span class="n">inputs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">sym</span><span class="p">)</span>
<span class="n">input_id_to_loc</span><span class="p">[</span><span class="nb">id</span><span class="p">(</span><span class="n">sym</span><span class="p">)]</span> <span class="o">=</span> <span class="n">loc</span>
<span class="n">input_locs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">loc</span><span class="p">)</span>
<span class="n">locs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">input_locs</span><span class="p">)</span>
<span class="k">return</span> <span class="n">inputs</span><span class="p">,</span> <span class="n">locs</span>
<span class="n">inputs</span> <span class="o">=</span> <span class="p">[]</span>
<span class="c1"># create graph for `cond_func&#39;</span>
<span class="n">cond_g</span><span class="p">,</span> <span class="n">cond_num_outputs</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_create_subgraph</span><span class="p">(</span><span class="n">inputs</span><span class="p">,</span> <span class="k">lambda</span><span class="p">:</span> <span class="n">pred</span><span class="p">,</span> <span class="n">name</span> <span class="o">+</span> <span class="s2">&quot;_pred&quot;</span><span class="p">)</span>
<span class="k">if</span> <span class="n">cond_num_outputs</span> <span class="o">!=</span> <span class="mi">1</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;pred should always be a single output&quot;</span><span class="p">)</span>
<span class="c1"># create graph for `then`</span>
<span class="n">then_g</span><span class="p">,</span> <span class="n">then_num_outputs</span><span class="p">,</span> <span class="n">then_fmt</span> <span class="o">=</span> <span class="n">_create_subgraph</span><span class="p">(</span><span class="n">inputs</span><span class="p">,</span> <span class="n">then_func</span><span class="p">,</span> <span class="n">name</span> <span class="o">+</span> <span class="s2">&quot;_then&quot;</span><span class="p">)</span>
<span class="c1"># create graph for `else`</span>
<span class="n">else_g</span><span class="p">,</span> <span class="n">else_num_outputs</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_create_subgraph</span><span class="p">(</span><span class="n">inputs</span><span class="p">,</span> <span class="n">else_func</span><span class="p">,</span> <span class="n">name</span> <span class="o">+</span> <span class="s2">&quot;_else&quot;</span><span class="p">)</span>
<span class="k">if</span> <span class="n">then_num_outputs</span> <span class="o">!=</span> <span class="n">else_num_outputs</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;Number of outputs differs between then-branch and else-branch&quot;</span><span class="p">)</span>
<span class="c1"># find symbols used in either cond_g or func_g</span>
<span class="n">input_syms</span><span class="p">,</span> <span class="p">(</span><span class="n">cond_input_locs</span><span class="p">,</span> <span class="n">then_input_locs</span><span class="p">,</span> <span class="n">else_input_locs</span><span class="p">)</span> <span class="o">=</span> \
<span class="n">_union_inputs</span><span class="p">(</span><span class="n">cond_g</span><span class="p">,</span> <span class="n">then_g</span><span class="p">,</span> <span class="n">else_g</span><span class="p">)</span>
<span class="n">result</span> <span class="o">=</span> <span class="n">symbol</span><span class="o">.</span><span class="n">_internal</span><span class="o">.</span><span class="n">_cond</span><span class="p">(</span>
<span class="c1"># [cond, then_g, else_g, *input_syms]</span>
<span class="n">cond_g</span><span class="p">,</span>
<span class="n">then_g</span><span class="p">,</span>
<span class="n">else_g</span><span class="p">,</span>
<span class="o">*</span><span class="n">input_syms</span><span class="p">,</span>
<span class="n">cond_input_locs</span><span class="o">=</span><span class="n">cond_input_locs</span><span class="p">,</span>
<span class="n">then_input_locs</span><span class="o">=</span><span class="n">then_input_locs</span><span class="p">,</span>
<span class="n">else_input_locs</span><span class="o">=</span><span class="n">else_input_locs</span><span class="p">,</span>
<span class="n">num_outputs</span><span class="o">=</span><span class="n">then_num_outputs</span>
<span class="p">)</span>
<span class="n">outputs</span> <span class="o">=</span> <span class="p">[</span><span class="n">result</span><span class="p">[</span><span class="n">i</span><span class="p">]</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">then_num_outputs</span><span class="p">)]</span>
<span class="n">outputs</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_regroup</span><span class="p">(</span><span class="n">outputs</span><span class="p">,</span> <span class="n">then_fmt</span><span class="p">)</span>
<span class="k">return</span> <span class="n">outputs</span></div>
<span class="k">def</span> <span class="nf">adamw_update</span><span class="p">(</span><span class="n">weight</span><span class="p">,</span> <span class="n">grad</span><span class="p">,</span> <span class="n">mean</span><span class="p">,</span> <span class="n">var</span><span class="p">,</span> <span class="n">rescale_grad</span><span class="p">,</span> <span class="n">lr</span><span class="p">,</span> <span class="n">eta</span><span class="p">,</span> <span class="n">beta1</span><span class="o">=</span><span class="mf">0.9</span><span class="p">,</span> <span class="n">beta2</span><span class="o">=</span><span class="mf">0.999</span><span class="p">,</span>
<span class="n">epsilon</span><span class="o">=</span><span class="mf">1e-8</span><span class="p">,</span> <span class="n">wd</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">clip_gradient</span><span class="o">=-</span><span class="mi">1</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">rescale_grad</span><span class="p">,</span> <span class="n">Symbol</span><span class="p">):</span>
<span class="n">rescale_grad</span> <span class="o">=</span> <span class="n">symbol</span><span class="o">.</span><span class="n">full</span><span class="p">(</span><span class="n">shape</span><span class="o">=</span><span class="p">(</span><span class="mi">1</span><span class="p">,),</span> <span class="n">val</span><span class="o">=</span><span class="n">rescale_grad</span><span class="p">)</span>
<span class="k">return</span> <span class="n">symbol</span><span class="o">.</span><span class="n">_internal</span><span class="o">.</span><span class="n">_adamw_update</span><span class="p">(</span><span class="n">weight</span><span class="o">=</span><span class="n">weight</span><span class="p">,</span> <span class="n">grad</span><span class="o">=</span><span class="n">grad</span><span class="p">,</span> <span class="n">mean</span><span class="o">=</span><span class="n">mean</span><span class="p">,</span> <span class="n">var</span><span class="o">=</span><span class="n">var</span><span class="p">,</span>
<span class="n">rescale_grad</span><span class="o">=</span><span class="n">rescale_grad</span><span class="p">,</span> <span class="n">lr</span><span class="o">=</span><span class="n">lr</span><span class="p">,</span> <span class="n">eta</span><span class="o">=</span><span class="n">eta</span><span class="p">,</span>
<span class="n">beta1</span><span class="o">=</span><span class="n">beta1</span><span class="p">,</span> <span class="n">beta2</span><span class="o">=</span><span class="n">beta2</span><span class="p">,</span> <span class="n">epsilon</span><span class="o">=</span><span class="n">epsilon</span><span class="p">,</span>
<span class="n">wd</span><span class="o">=</span><span class="n">wd</span><span class="p">,</span> <span class="n">clip_gradient</span><span class="o">=</span><span class="n">clip_gradient</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="n">out</span><span class="p">,</span>
<span class="n">name</span><span class="o">=</span><span class="n">name</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">mp_adamw_update</span><span class="p">(</span><span class="n">weight</span><span class="p">,</span> <span class="n">grad</span><span class="p">,</span> <span class="n">mean</span><span class="p">,</span> <span class="n">var</span><span class="p">,</span> <span class="n">weight32</span><span class="p">,</span> <span class="n">rescale_grad</span><span class="p">,</span> <span class="n">lr</span><span class="p">,</span> <span class="n">eta</span><span class="p">,</span> <span class="n">beta1</span><span class="o">=</span><span class="mf">0.9</span><span class="p">,</span>
<span class="n">beta2</span><span class="o">=</span><span class="mf">0.999</span><span class="p">,</span> <span class="n">epsilon</span><span class="o">=</span><span class="mf">1e-8</span><span class="p">,</span> <span class="n">wd</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">clip_gradient</span><span class="o">=-</span><span class="mi">1</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">name</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">rescale_grad</span><span class="p">,</span> <span class="n">Symbol</span><span class="p">):</span>
<span class="n">rescale_grad</span> <span class="o">=</span> <span class="n">symbol</span><span class="o">.</span><span class="n">full</span><span class="p">(</span><span class="n">shape</span><span class="o">=</span><span class="p">(</span><span class="mi">1</span><span class="p">,),</span> <span class="n">val</span><span class="o">=</span><span class="n">rescale_grad</span><span class="p">)</span>
<span class="k">return</span> <span class="n">symbol</span><span class="o">.</span><span class="n">_internal</span><span class="o">.</span><span class="n">_mp_adamw_update</span><span class="p">(</span><span class="n">weight</span><span class="o">=</span><span class="n">weight</span><span class="p">,</span> <span class="n">grad</span><span class="o">=</span><span class="n">grad</span><span class="p">,</span> <span class="n">mean</span><span class="o">=</span><span class="n">mean</span><span class="p">,</span> <span class="n">var</span><span class="o">=</span><span class="n">var</span><span class="p">,</span>
<span class="n">weight32</span><span class="o">=</span><span class="n">weight32</span><span class="p">,</span>
<span class="n">rescale_grad</span><span class="o">=</span><span class="n">rescale_grad</span><span class="p">,</span> <span class="n">lr</span><span class="o">=</span><span class="n">lr</span><span class="p">,</span> <span class="n">eta</span><span class="o">=</span><span class="n">eta</span><span class="p">,</span>
<span class="n">beta1</span><span class="o">=</span><span class="n">beta1</span><span class="p">,</span> <span class="n">beta2</span><span class="o">=</span><span class="n">beta2</span><span class="p">,</span> <span class="n">epsilon</span><span class="o">=</span><span class="n">epsilon</span><span class="p">,</span>
<span class="n">wd</span><span class="o">=</span><span class="n">wd</span><span class="p">,</span> <span class="n">clip_gradient</span><span class="o">=</span><span class="n">clip_gradient</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="n">out</span><span class="p">,</span>
<span class="n">name</span><span class="o">=</span><span class="n">name</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
</pre></div>
<!-- Page-feedback widget. Static markup only: the click behavior is presumably
     bound by _static/feedback.css / a feedback script via the .feedback-answer
     and data-response hooks - confirm against the theme's JS before editing. -->
<hr class="feedback-hr-top" />
<div class="feedback-container">
<div class="feedback-question">Did this page help you?</div>
<!-- NOTE(review): these clickable <div>s do a button's job; a native
     <button type="button"> would be keyboard-accessible. Not changed here
     because the external script/CSS selects on these exact classes. -->
<div class="feedback-answer-container">
<div class="feedback-answer yes-link" data-response="yes">Yes</div>
<div class="feedback-answer no-link" data-response="no">No</div>
</div>
<!-- Hidden until a response is recorded (visibility controlled externally). -->
<div class="feedback-thank-you">Thanks for your feedback!</div>
</div>
<hr class="feedback-hr-bottom" />
</div>
<!-- Empty placeholder for the in-page outline; presumably populated at runtime
     by the theme's JavaScript - it ships empty in the static markup. -->
<div class="side-doc-outline">
<div class="side-doc-outline--content">
</div>
</div>
<!-- NOTE(review): .clearer looks like a float-clearing spacer; the rule lives
     in the theme CSS, not in this file. -->
<div class="clearer"></div>
</div><div class="pagenation">
</div>
<!-- Primary site footer: resource links, social-media links, project blurb. -->
<footer class="site-footer h-card">
<div class="wrapper">
<div class="row">
<div class="col-4">
<h4 class="footer-category-title">Resources</h4>
<ul class="contact-list">
<li><a class="u-email" href="mailto:dev@mxnet.apache.org">Dev list</a></li>
<li><a class="u-email" href="mailto:user@mxnet.apache.org">User mailing list</a></li>
<li><a href="https://cwiki.apache.org/confluence/display/MXNET/Apache+MXNet+Home">Developer Wiki</a></li>
<li><a href="https://issues.apache.org/jira/projects/MXNET/issues">Jira Tracker</a></li>
<li><a href="https://github.com/apache/mxnet/labels/Roadmap">Github Roadmap</a></li>
<li><a href="https://medium.com/apache-mxnet">Blog</a></li>
<li><a href="https://discuss.mxnet.io">Forum</a></li>
<li><a href="/community/contribute">Contribute</a></li>
</ul>
</div>
<!-- The SVG icons are decorative (each link is named by its visible .username
     text), so they are hidden from assistive tech with aria-hidden; focusable="false"
     also keeps legacy IE/Edge from adding them to the tab order. -->
<div class="col-4"><ul class="social-media-list"><li><a href="https://github.com/apache/mxnet"><svg class="svg-icon" aria-hidden="true" focusable="false"><use xlink:href="../../../_static/minima-social-icons.svg#github"></use></svg> <span class="username">apache/mxnet</span></a></li><li><a href="https://www.twitter.com/apachemxnet"><svg class="svg-icon" aria-hidden="true" focusable="false"><use xlink:href="../../../_static/minima-social-icons.svg#twitter"></use></svg> <span class="username">apachemxnet</span></a></li><li><a href="https://youtube.com/apachemxnet"><svg class="svg-icon" aria-hidden="true" focusable="false"><use xlink:href="../../../_static/minima-social-icons.svg#youtube"></use></svg> <span class="username">apachemxnet</span></a></li></ul>
</div>
<div class="col-4 footer-text">
<p>A flexible and efficient library for deep learning.</p>
</div>
</div>
</div>
</footer>
<!-- Secondary footer: ASF incubator disclaimer and trademark notice. -->
<footer class="site-footer2">
<div class="wrapper">
<div class="row">
<div class="col-3">
<!-- alt added: the image previously had no alt attribute, so screen readers
     would announce the file name instead of the logo's meaning. -->
<img src="../../../_static/apache_incubator_logo.png" alt="Apache Incubator logo" class="footer-logo col-2">
</div>
<div class="footer-bottom-warning col-9">
<!-- apache.org link upgraded from http to https (the site serves both;
     https avoids a mixed-scheme hop from this https-hosted page). -->
<p>Apache MXNet is an effort undergoing incubation at <a href="https://www.apache.org/">The Apache Software Foundation</a> (ASF), <span style="font-weight:bold">sponsored by the <i>Apache Incubator</i></span>. Incubation is required
of all newly accepted projects until a further review indicates that the infrastructure,
communications, and decision making process have stabilized in a manner consistent with other
successful ASF projects. While incubation status is not necessarily a reflection of the completeness
or stability of the code, it does indicate that the project has yet to be fully endorsed by the ASF.
</p><p>"Copyright © 2017-2018, The Apache Software Foundation Apache MXNet, MXNet, Apache, the Apache
feather, and the Apache MXNet project logo are either registered trademarks or trademarks of the
Apache Software Foundation."</p>
</div>
</div>
</div>
</footer>
</body>
</html>