<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<style>
.dropdown {
position: relative;
display: inline-block;
}
.dropdown-content {
display: none;
position: absolute;
background-color: #f9f9f9;
min-width: 160px;
box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2);
padding: 12px 16px;
z-index: 1;
text-align: left;
}
.dropdown:hover .dropdown-content {
display: block;
}
.dropdown-option:hover {
color: #FF4500;
}
.dropdown-option-active {
color: #FF4500;
font-weight: lighter;
}
.dropdown-option {
color: #000000;
font-weight: lighter;
}
.dropdown-header {
color: #FFFFFF;
display: inline-flex;
}
.dropdown-caret {
width: 18px;
height: 54px;
}
.dropdown-caret-path {
fill: #FFFFFF;
}
</style>
<title>mxnet.ndarray.ndarray &#8212; Apache MXNet documentation</title>
<link rel="stylesheet" href="../../../_static/basic.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
<link rel="stylesheet" type="text/css" href="../../../_static/mxnet.css" />
<link rel="stylesheet" href="../../../_static/material-design-lite-1.3.0/material.blue-deep_orange.min.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/sphinx_materialdesign_theme.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/fontawesome/all.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/fonts.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/feedback.css" type="text/css" />
<script id="documentation_options" data-url_root="../../../" src="../../../_static/documentation_options.js"></script>
<script src="../../../_static/jquery.js"></script>
<script src="../../../_static/underscore.js"></script>
<script src="../../../_static/doctools.js"></script>
<script src="../../../_static/language_data.js"></script>
<script src="../../../_static/matomo_analytics.js"></script>
<script src="../../../_static/autodoc.js"></script>
<script crossorigin="anonymous" integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA=" src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"></script>
<script async="async" src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/latest.js?config=TeX-AMS-MML_HTMLorMML"></script>
<script type="text/x-mathjax-config">MathJax.Hub.Config({"tex2jax": {"inlineMath": [["$", "$"], ["\\(", "\\)"]], "processEscapes": true, "ignoreClass": "document", "processClass": "math|output_area"}})</script>
<script src="../../../_static/sphinx_materialdesign_theme.js"></script>
<link rel="shortcut icon" href="../../../_static/mxnet-icon.png"/>
<link rel="index" title="Index" href="../../../genindex.html" />
<link rel="search" title="Search" href="../../../search.html" />
</head>
<body><header class="site-header" role="banner">
<div class="wrapper">
<a class="site-title" rel="author" href="/"><img
src="../../../_static/mxnet_logo.png" class="site-header-logo"></a>
<nav class="site-nav">
<input type="checkbox" id="nav-trigger" class="nav-trigger"/>
<label for="nav-trigger">
<span class="menu-icon">
<svg viewBox="0 0 18 15" width="18px" height="15px">
<path d="M18,1.484c0,0.82-0.665,1.484-1.484,1.484H1.484C0.665,2.969,0,2.304,0,1.484l0,0C0,0.665,0.665,0,1.484,0 h15.032C17.335,0,18,0.665,18,1.484L18,1.484z M18,7.516C18,8.335,17.335,9,16.516,9H1.484C0.665,9,0,8.335,0,7.516l0,0 c0-0.82,0.665-1.484,1.484-1.484h15.032C17.335,6.031,18,6.696,18,7.516L18,7.516z M18,13.516C18,14.335,17.335,15,16.516,15H1.484 C0.665,15,0,14.335,0,13.516l0,0c0-0.82,0.665-1.483,1.484-1.483h15.032C17.335,12.031,18,12.695,18,13.516L18,13.516z"/>
</svg>
</span>
</label>
<div class="trigger">
<a class="page-link" href="/get_started">Get Started</a>
<a class="page-link" href="/features">Features</a>
<a class="page-link" href="/ecosystem">Ecosystem</a>
<a class="page-link page-current" href="/api">Docs & Tutorials</a>
<a class="page-link" href="/trusted_by">Trusted By</a>
<a class="page-link" href="https://github.com/apache/mxnet">GitHub</a>
<div class="dropdown" style="min-width:100px">
<span class="dropdown-header">Apache
<svg class="dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
</span>
<div class="dropdown-content" style="min-width:250px">
<a href="https://www.apache.org/foundation/">Apache Software Foundation</a>
<a href="https://incubator.apache.org/">Apache Incubator</a>
<a href="https://www.apache.org/licenses/">License</a>
<a href="/versions/1.9.1/api/faq/security.html">Security</a>
<a href="https://privacy.apache.org/policies/privacy-policy-public.html">Privacy</a>
<a href="https://www.apache.org/events/current-event">Events</a>
<a href="https://www.apache.org/foundation/sponsorship.html">Sponsorship</a>
<a href="https://www.apache.org/foundation/thanks.html">Thanks</a>
</div>
</div>
<div class="dropdown">
<span class="dropdown-header">master
<svg class="dropdown-caret" viewBox="0 0 32 32" class="icon icon-caret-bottom" aria-hidden="true"><path class="dropdown-caret-path" d="M24 11.305l-7.997 11.39L8 11.305z"></path></svg>
</span>
<div class="dropdown-content">
<a class="dropdown-option-active" href="/versions/master/">master</a><br>
<a class="dropdown-option" href="/versions/1.9.1/">1.9.1</a><br>
<a class="dropdown-option" href="/versions/1.8.0/">1.8.0</a><br>
<a class="dropdown-option" href="/versions/1.7.0/">1.7.0</a><br>
<a class="dropdown-option" href="/versions/1.6.0/">1.6.0</a><br>
<a class="dropdown-option" href="/versions/1.5.0/">1.5.0</a><br>
<a class="dropdown-option" href="/versions/1.4.1/">1.4.1</a><br>
<a class="dropdown-option" href="/versions/1.3.1/">1.3.1</a><br>
<a class="dropdown-option" href="/versions/1.2.1/">1.2.1</a><br>
<a class="dropdown-option" href="/versions/1.1.0/">1.1.0</a><br>
<a class="dropdown-option" href="/versions/1.0.0/">1.0.0</a><br>
<a class="dropdown-option" href="/versions/0.12.1/">0.12.1</a><br>
<a class="dropdown-option" href="/versions/0.11.0/">0.11.0</a>
</div>
</div>
</div>
</nav>
</div>
</header>
<div class="mdl-layout mdl-js-layout mdl-layout--fixed-header mdl-layout--fixed-drawer"><header class="mdl-layout__header mdl-layout__header--waterfall ">
<div class="mdl-layout__header-row">
<nav class="mdl-navigation breadcrumb">
<a class="mdl-navigation__link" href="../../index.html">Module code</a><i class="material-icons">navigate_next</i>
<a class="mdl-navigation__link is-active">mxnet.ndarray.ndarray</a>
</nav>
<div class="mdl-layout-spacer"></div>
<nav class="mdl-navigation">
<form class="form-inline pull-sm-right" action="../../../search.html" method="get">
<div class="mdl-textfield mdl-js-textfield mdl-textfield--expandable mdl-textfield--floating-label mdl-textfield--align-right">
<label id="quick-search-icon" class="mdl-button mdl-js-button mdl-button--icon" for="waterfall-exp">
<i class="material-icons">search</i>
</label>
<div class="mdl-textfield__expandable-holder">
<input class="mdl-textfield__input" type="text" name="q" id="waterfall-exp" placeholder="Search" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</div>
</div>
<div class="mdl-tooltip" data-mdl-for="quick-search-icon">
Quick search
</div>
</form>
<a id="button-show-github"
href="https://github.com/apache/mxnet/edit/master/docs/python_docs/python/_modules/mxnet/ndarray/ndarray" class="mdl-button mdl-js-button mdl-button--icon">
<i class="material-icons">edit</i>
</a>
<div class="mdl-tooltip" data-mdl-for="button-show-github">
Edit on Github
</div>
</nav>
</div>
<div class="mdl-layout__header-row header-links">
<div class="mdl-layout-spacer"></div>
<nav class="mdl-navigation">
</nav>
</div>
</header><header class="mdl-layout__drawer">
<div class="globaltoc">
<span class="mdl-layout-title toc">Table Of Contents</span>
<nav class="mdl-navigation">
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../../tutorials/index.html">Python Tutorials</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/getting-started/index.html">Getting Started</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/index.html">Crash Course</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/0-introduction.html">Introduction</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/1-nparray.html">Step 1: Manipulate data with NP on MXNet</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/2-create-nn.html">Step 2: Create a neural network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/3-autograd.html">Step 3: Automatic differentiation with autograd</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/4-components.html">Step 4: Necessary components that are not in the network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/5-datasets.html">Step 5: <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s and <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/5-datasets.html#Using-own-data-with-included-Datasets">Using own data with included <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/5-datasets.html#Using-your-own-data-with-custom-Datasets">Using your own data with custom <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/5-datasets.html#New-in-MXNet-2.0:-faster-C++-backend-dataloaders">New in MXNet 2.0: faster C++ backend dataloaders</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/6-train-nn.html">Step 6: Train a Neural Network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/7-use-gpus.html">Step 7: Load and Run a NN using GPU</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/to-mxnet/index.html">Moving to MXNet from Other Frameworks</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/to-mxnet/pytorch.html">PyTorch vs Apache MXNet</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/gluon_from_experiment_to_deployment.html">Gluon: from experiment to deployment</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/gluon_migration_guide.html">Gluon2.0: Migration Guide</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/logistic_regression_explained.html">Logistic regression explained</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/image/mnist.html">MNIST</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/packages/index.html">Packages</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/autograd/index.html">Automatic Differentiation</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/gluon/index.html">Gluon</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/index.html">Blocks</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/custom-layer.html">Custom Layers</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/hybridize.html">Hybridize</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/init.html">Initialization</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/naming.html">Parameter and Block Naming</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/nn.html">Layers and Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/parameters.html">Parameter Management</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/save_load_params.html">Saving and Loading Gluon Models</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/activations/activations.html">Activation Blocks</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/data/index.html">Data Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html">Image Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html">Gluon <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s and <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Using-own-data-with-included-Datasets">Using own data with included <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Using-own-data-with-custom-Datasets">Using own data with custom <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Appendix:-Upgrading-from-Module-DataIter-to-Gluon-DataLoader">Appendix: Upgrading from Module <code class="docutils literal notranslate"><span class="pre">DataIter</span></code> to Gluon <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/image/index.html">Image Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/info_gan.html">Image similarity search with InfoGAN</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/mnist.html">Handwritten Digit Recognition</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/index.html">Losses</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/custom-loss.html">Custom Loss Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/kl_divergence.html">Kullback-Leibler (KL) Divergence</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/loss.html">Loss functions</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/text/index.html">Text Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/text/gnmt.html">Google Neural Machine Translation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/text/transformer.html">Machine Translation with Transformer</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/training/index.html">Training</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/fit_api_tutorial.html">MXNet Gluon Fit API</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/trainer.html">Trainer</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/index.html">Learning Rates</a><ul>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_finder.html">Learning Rate Finder</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_schedules.html">Learning Rate Schedules</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_schedules_advanced.html">Advanced Learning Rate Schedules</a></li>
</ul>
</li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/normalization/index.html">Normalization Blocks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/kvstore/index.html">KVStore</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/kvstore/kvstore.html">Distributed Key-Value Store</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/legacy/index.html">Legacy</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/index.html">NDArray</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/01-ndarray-intro.html">An Intro: Manipulate Data the MXNet Way with NDArray</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/02-ndarray-operations.html">NDArray Operations</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/03-ndarray-contexts.html">NDArray Contexts</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/gotchas_numpy_in_mxnet.html">Gotchas using NumPy in Apache MXNet</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/sparse/index.html">Tutorials</a><ul>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/sparse/csr.html">CSRNDArray - NDArray in Compressed Sparse Row Storage Format</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/sparse/row_sparse.html">RowSparseNDArray - NDArray for Sparse Gradient Updates</a></li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/np/index.html">What is NP on MXNet</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/np/cheat-sheet.html">The NP on MXNet cheat sheet</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/np/np-vs-numpy.html">Differences between NP on MXNet and NumPy</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/onnx/index.html">ONNX</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/fine_tuning_gluon.html">Fine-tuning an ONNX model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/inference_on_onnx_model.html">Running inference on MXNet/Gluon from an ONNX model</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/deploy/export/onnx.html">Export ONNX Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/optimizer/index.html">Optimizers</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/viz/index.html">Visualization</a><ul>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/visualize_graph">Visualize networks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/performance/index.html">Performance</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/performance/compression/index.html">Compression</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/compression/int8.html">Deploy with int-8</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/float16">Float16</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/gradient_compression">Gradient Compression</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/int8_inference.html">GluonCV with Quantized Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/performance/backend/index.html">Accelerated Backend Tools</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/dnnl/index.html">oneDNN</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/dnnl/dnnl_readme.html">Install MXNet with oneDNN</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/dnnl/dnnl_quantization.html">oneDNN Quantization</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/dnnl/dnnl_quantization_inc.html">Improving accuracy with Intel® Neural Compressor</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/tvm.html">Use TVM</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/profiler.html">Profiling MXNet Models</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/amp.html">Using AMP: Automatic Mixed Precision</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/deploy/index.html">Deployment</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/export/index.html">Export</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/export/onnx.html">Exporting to ONNX format</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/export_network.html">Export Gluon CV Models</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/blocks/save_load_params.html">Save / Load Parameters</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/inference/index.html">Inference</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/cpp.html">Deploy into C++</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/image_classification_jetson.html">Image Classication using pretrained ResNet-50 model on Jetson module</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/index.html">Run on AWS</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/use_ec2.html">Run on an EC2 Instance</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/use_sagemaker.html">Run on Amazon SageMaker</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/cloud.html">MXNet on the Cloud</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/extend/index.html">Extend</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/extend/customop.html">Custom Numpy Operators</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/new_op">New Operator Creation</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/add_op_in_backend">New Operator in MXNet Backend</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/using_rtc">Using RTC for CUDA kernels</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../../api/index.html">Python API</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../api/np/index.html">mxnet.np</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/np/arrays.html">Array objects</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/arrays.ndarray.html">The N-dimensional array (<code class="xref py py-class docutils literal notranslate"><span class="pre">ndarray</span></code>)</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/arrays.indexing.html">Indexing</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/np/routines.html">Routines</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.array-creation.html">Array creation routines</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.eye.html">mxnet.np.eye</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.empty.html">mxnet.np.empty</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.full.html">mxnet.np.full</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.identity.html">mxnet.np.identity</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ones.html">mxnet.np.ones</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ones_like.html">mxnet.np.ones_like</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.zeros.html">mxnet.np.zeros</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.zeros_like.html">mxnet.np.zeros_like</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.array.html">mxnet.np.array</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.copy.html">mxnet.np.copy</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arange.html">mxnet.np.arange</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linspace.html">mxnet.np.linspace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.logspace.html">mxnet.np.logspace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.meshgrid.html">mxnet.np.meshgrid</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.tril.html">mxnet.np.tril</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.array-manipulation.html">Array manipulation routines</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.reshape.html">mxnet.np.reshape</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ravel.html">mxnet.np.ravel</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ndarray.flatten.html">mxnet.np.ndarray.flatten</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.swapaxes.html">mxnet.np.swapaxes</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ndarray.T.html">mxnet.np.ndarray.T</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.transpose.html">mxnet.np.transpose</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.moveaxis.html">mxnet.np.moveaxis</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.rollaxis.html">mxnet.np.rollaxis</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.expand_dims.html">mxnet.np.expand_dims</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.squeeze.html">mxnet.np.squeeze</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.broadcast_to.html">mxnet.np.broadcast_to</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.broadcast_arrays.html">mxnet.np.broadcast_arrays</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.atleast_1d.html">mxnet.np.atleast_1d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.atleast_2d.html">mxnet.np.atleast_2d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.atleast_3d.html">mxnet.np.atleast_3d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.concatenate.html">mxnet.np.concatenate</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.stack.html">mxnet.np.stack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.dstack.html">mxnet.np.dstack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.vstack.html">mxnet.np.vstack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.column_stack.html">mxnet.np.column_stack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.hstack.html">mxnet.np.hstack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.split.html">mxnet.np.split</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.hsplit.html">mxnet.np.hsplit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.vsplit.html">mxnet.np.vsplit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.array_split.html">mxnet.np.array_split</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.dsplit.html">mxnet.np.dsplit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.tile.html">mxnet.np.tile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.repeat.html">mxnet.np.repeat</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.unique.html">mxnet.np.unique</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.delete.html">mxnet.np.delete</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.insert.html">mxnet.np.insert</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.append.html">mxnet.np.append</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.resize.html">mxnet.np.resize</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.trim_zeros.html">mxnet.np.trim_zeros</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.reshape.html">mxnet.np.reshape</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.flip.html">mxnet.np.flip</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.roll.html">mxnet.np.roll</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.rot90.html">mxnet.np.rot90</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.fliplr.html">mxnet.np.fliplr</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.flipud.html">mxnet.np.flipud</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.io.html">Input and output</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.genfromtxt.html">mxnet.np.genfromtxt</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ndarray.tolist.html">mxnet.np.ndarray.tolist</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.set_printoptions.html">mxnet.np.set_printoptions</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.linalg.html">Linear algebra (<code class="xref py py-mod docutils literal notranslate"><span class="pre">numpy.linalg</span></code>)</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.dot.html">mxnet.np.dot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.vdot.html">mxnet.np.vdot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.inner.html">mxnet.np.inner</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.outer.html">mxnet.np.outer</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.tensordot.html">mxnet.np.tensordot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.einsum.html">mxnet.np.einsum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.multi_dot.html">mxnet.np.linalg.multi_dot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.matmul.html">mxnet.np.matmul</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.matrix_power.html">mxnet.np.linalg.matrix_power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.kron.html">mxnet.np.kron</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.svd.html">mxnet.np.linalg.svd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.cholesky.html">mxnet.np.linalg.cholesky</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.qr.html">mxnet.np.linalg.qr</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.eig.html">mxnet.np.linalg.eig</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.eigh.html">mxnet.np.linalg.eigh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.eigvals.html">mxnet.np.linalg.eigvals</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.eigvalsh.html">mxnet.np.linalg.eigvalsh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.norm.html">mxnet.np.linalg.norm</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.trace.html">mxnet.np.trace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.cond.html">mxnet.np.linalg.cond</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.det.html">mxnet.np.linalg.det</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.matrix_rank.html">mxnet.np.linalg.matrix_rank</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.slogdet.html">mxnet.np.linalg.slogdet</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.solve.html">mxnet.np.linalg.solve</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.tensorsolve.html">mxnet.np.linalg.tensorsolve</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.lstsq.html">mxnet.np.linalg.lstsq</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.inv.html">mxnet.np.linalg.inv</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.pinv.html">mxnet.np.linalg.pinv</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.tensorinv.html">mxnet.np.linalg.tensorinv</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.math.html">Mathematical functions</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.sin.html">mxnet.np.sin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cos.html">mxnet.np.cos</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.tan.html">mxnet.np.tan</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arcsin.html">mxnet.np.arcsin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arccos.html">mxnet.np.arccos</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arctan.html">mxnet.np.arctan</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.degrees.html">mxnet.np.degrees</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.radians.html">mxnet.np.radians</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.hypot.html">mxnet.np.hypot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arctan2.html">mxnet.np.arctan2</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.deg2rad.html">mxnet.np.deg2rad</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.rad2deg.html">mxnet.np.rad2deg</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.unwrap.html">mxnet.np.unwrap</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.sinh.html">mxnet.np.sinh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cosh.html">mxnet.np.cosh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.tanh.html">mxnet.np.tanh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arcsinh.html">mxnet.np.arcsinh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arccosh.html">mxnet.np.arccosh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arctanh.html">mxnet.np.arctanh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.rint.html">mxnet.np.rint</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.fix.html">mxnet.np.fix</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.floor.html">mxnet.np.floor</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ceil.html">mxnet.np.ceil</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.trunc.html">mxnet.np.trunc</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.around.html">mxnet.np.around</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.round_.html">mxnet.np.round_</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.sum.html">mxnet.np.sum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.prod.html">mxnet.np.prod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cumsum.html">mxnet.np.cumsum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanprod.html">mxnet.np.nanprod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nansum.html">mxnet.np.nansum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cumprod.html">mxnet.np.cumprod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nancumprod.html">mxnet.np.nancumprod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nancumsum.html">mxnet.np.nancumsum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.diff.html">mxnet.np.diff</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ediff1d.html">mxnet.np.ediff1d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cross.html">mxnet.np.cross</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.trapz.html">mxnet.np.trapz</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.exp.html">mxnet.np.exp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.expm1.html">mxnet.np.expm1</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.log.html">mxnet.np.log</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.log10.html">mxnet.np.log10</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.log2.html">mxnet.np.log2</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.log1p.html">mxnet.np.log1p</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.logaddexp.html">mxnet.np.logaddexp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.i0.html">mxnet.np.i0</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ldexp.html">mxnet.np.ldexp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.signbit.html">mxnet.np.signbit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.copysign.html">mxnet.np.copysign</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.frexp.html">mxnet.np.frexp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.spacing.html">mxnet.np.spacing</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.lcm.html">mxnet.np.lcm</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.gcd.html">mxnet.np.gcd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.add.html">mxnet.np.add</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.reciprocal.html">mxnet.np.reciprocal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.negative.html">mxnet.np.negative</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.divide.html">mxnet.np.divide</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.power.html">mxnet.np.power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.subtract.html">mxnet.np.subtract</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.mod.html">mxnet.np.mod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.multiply.html">mxnet.np.multiply</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.true_divide.html">mxnet.np.true_divide</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.remainder.html">mxnet.np.remainder</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.positive.html">mxnet.np.positive</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.float_power.html">mxnet.np.float_power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.fmod.html">mxnet.np.fmod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.modf.html">mxnet.np.modf</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.divmod.html">mxnet.np.divmod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.floor_divide.html">mxnet.np.floor_divide</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.clip.html">mxnet.np.clip</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.sqrt.html">mxnet.np.sqrt</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cbrt.html">mxnet.np.cbrt</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.square.html">mxnet.np.square</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.absolute.html">mxnet.np.absolute</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.sign.html">mxnet.np.sign</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.maximum.html">mxnet.np.maximum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.minimum.html">mxnet.np.minimum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.fabs.html">mxnet.np.fabs</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.heaviside.html">mxnet.np.heaviside</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.fmax.html">mxnet.np.fmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.fmin.html">mxnet.np.fmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nan_to_num.html">mxnet.np.nan_to_num</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.interp.html">mxnet.np.interp</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/random/index.html">np.random</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.choice.html">mxnet.np.random.choice</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.shuffle.html">mxnet.np.random.shuffle</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.normal.html">mxnet.np.random.normal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.uniform.html">mxnet.np.random.uniform</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.rand.html">mxnet.np.random.rand</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.randint.html">mxnet.np.random.randint</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.beta.html">mxnet.np.random.beta</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.chisquare.html">mxnet.np.random.chisquare</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.exponential.html">mxnet.np.random.exponential</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.f.html">mxnet.np.random.f</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.gamma.html">mxnet.np.random.gamma</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.gumbel.html">mxnet.np.random.gumbel</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.laplace.html">mxnet.np.random.laplace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.logistic.html">mxnet.np.random.logistic</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.lognormal.html">mxnet.np.random.lognormal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.multinomial.html">mxnet.np.random.multinomial</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.multivariate_normal.html">mxnet.np.random.multivariate_normal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.pareto.html">mxnet.np.random.pareto</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.power.html">mxnet.np.random.power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.rayleigh.html">mxnet.np.random.rayleigh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.weibull.html">mxnet.np.random.weibull</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.sort.html">Sorting, searching, and counting</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ndarray.sort.html">mxnet.np.ndarray.sort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.sort.html">mxnet.np.sort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.lexsort.html">mxnet.np.lexsort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.argsort.html">mxnet.np.argsort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.msort.html">mxnet.np.msort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.partition.html">mxnet.np.partition</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.argpartition.html">mxnet.np.argpartition</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.argmax.html">mxnet.np.argmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.argmin.html">mxnet.np.argmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanargmax.html">mxnet.np.nanargmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanargmin.html">mxnet.np.nanargmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.argwhere.html">mxnet.np.argwhere</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nonzero.html">mxnet.np.nonzero</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.flatnonzero.html">mxnet.np.flatnonzero</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.where.html">mxnet.np.where</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.searchsorted.html">mxnet.np.searchsorted</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.extract.html">mxnet.np.extract</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.count_nonzero.html">mxnet.np.count_nonzero</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.statistics.html">Statistics</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.min.html">mxnet.np.min</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.max.html">mxnet.np.max</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.amin.html">mxnet.np.amin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.amax.html">mxnet.np.amax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanmin.html">mxnet.np.nanmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanmax.html">mxnet.np.nanmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ptp.html">mxnet.np.ptp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.percentile.html">mxnet.np.percentile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanpercentile.html">mxnet.np.nanpercentile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.quantile.html">mxnet.np.quantile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanquantile.html">mxnet.np.nanquantile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.mean.html">mxnet.np.mean</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.std.html">mxnet.np.std</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.var.html">mxnet.np.var</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.median.html">mxnet.np.median</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.average.html">mxnet.np.average</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanmedian.html">mxnet.np.nanmedian</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanstd.html">mxnet.np.nanstd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanvar.html">mxnet.np.nanvar</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.corrcoef.html">mxnet.np.corrcoef</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.correlate.html">mxnet.np.correlate</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cov.html">mxnet.np.cov</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.histogram.html">mxnet.np.histogram</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.histogram2d.html">mxnet.np.histogram2d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.histogramdd.html">mxnet.np.histogramdd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.bincount.html">mxnet.np.bincount</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.histogram_bin_edges.html">mxnet.np.histogram_bin_edges</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.digitize.html">mxnet.np.digitize</a></li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/npx/index.html">NPX: NumPy Neural Network Extension</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.set_np.html">mxnet.npx.set_np</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.reset_np.html">mxnet.npx.reset_np</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.cpu.html">mxnet.npx.cpu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.cpu_pinned.html">mxnet.npx.cpu_pinned</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.gpu.html">mxnet.npx.gpu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.gpu_memory_info.html">mxnet.npx.gpu_memory_info</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.current_device.html">mxnet.npx.current_device</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.num_gpus.html">mxnet.npx.num_gpus</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.activation.html">mxnet.npx.activation</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.batch_norm.html">mxnet.npx.batch_norm</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.convolution.html">mxnet.npx.convolution</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.dropout.html">mxnet.npx.dropout</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.embedding.html">mxnet.npx.embedding</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.fully_connected.html">mxnet.npx.fully_connected</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.layer_norm.html">mxnet.npx.layer_norm</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.pooling.html">mxnet.npx.pooling</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.rnn.html">mxnet.npx.rnn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.leaky_relu.html">mxnet.npx.leaky_relu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.multibox_detection.html">mxnet.npx.multibox_detection</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.multibox_prior.html">mxnet.npx.multibox_prior</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.multibox_target.html">mxnet.npx.multibox_target</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.roi_pooling.html">mxnet.npx.roi_pooling</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.sigmoid.html">mxnet.npx.sigmoid</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.relu.html">mxnet.npx.relu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.smooth_l1.html">mxnet.npx.smooth_l1</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.softmax.html">mxnet.npx.softmax</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.log_softmax.html">mxnet.npx.log_softmax</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.topk.html">mxnet.npx.topk</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.waitall.html">mxnet.npx.waitall</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.load.html">mxnet.npx.load</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.save.html">mxnet.npx.save</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.one_hot.html">mxnet.npx.one_hot</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.pick.html">mxnet.npx.pick</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.reshape_like.html">mxnet.npx.reshape_like</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.batch_flatten.html">mxnet.npx.batch_flatten</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.batch_dot.html">mxnet.npx.batch_dot</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.gamma.html">mxnet.npx.gamma</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.sequence_mask.html">mxnet.npx.sequence_mask</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/gluon/index.html">mxnet.gluon</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/block.html">gluon.Block</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/hybrid_block.html">gluon.HybridBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/symbol_block.html">gluon.SymbolBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/constant.html">gluon.Constant</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/parameter.html">gluon.Parameter</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/trainer.html">gluon.Trainer</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/contrib/index.html">gluon.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/data/index.html">gluon.data</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/gluon/data/vision/index.html">data.vision</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/gluon/data/vision/datasets/index.html">vision.datasets</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/gluon/data/vision/transforms/index.html">vision.transforms</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/loss/index.html">gluon.loss</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/metric/index.html">gluon.metric</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/model_zoo/index.html">gluon.model_zoo.vision</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/nn/index.html">gluon.nn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/rnn/index.html">gluon.rnn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/utils/index.html">gluon.utils</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/autograd/index.html">mxnet.autograd</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/initializer/index.html">mxnet.initializer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/optimizer/index.html">mxnet.optimizer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/lr_scheduler/index.html">mxnet.lr_scheduler</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore/index.html">KVStore: Communication for Distributed Training</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore/index.html#horovod">Horovod</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/kvstore/generated/mxnet.kvstore.Horovod.html">mxnet.kvstore.Horovod</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore/index.html#byteps">BytePS</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/kvstore/generated/mxnet.kvstore.BytePS.html">mxnet.kvstore.BytePS</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore/index.html#kvstore-interface">KVStore Interface</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/kvstore/generated/mxnet.kvstore.KVStore.html">mxnet.kvstore.KVStore</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/kvstore/generated/mxnet.kvstore.KVStoreBase.html">mxnet.kvstore.KVStoreBase</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/kvstore/generated/mxnet.kvstore.KVStoreServer.html">mxnet.kvstore.KVStoreServer</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/contrib/index.html">mxnet.contrib</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/io/index.html">contrib.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/ndarray/index.html">contrib.ndarray</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/onnx/index.html">contrib.onnx</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/quantization/index.html">contrib.quantization</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/symbol/index.html">contrib.symbol</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/tensorboard/index.html">contrib.tensorboard</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/tensorrt/index.html">contrib.tensorrt</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/text/index.html">contrib.text</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/legacy/index.html">Legacy</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/callback/index.html">mxnet.callback</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/image/index.html">mxnet.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/io/index.html">mxnet.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/ndarray/index.html">mxnet.ndarray</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/ndarray.html">ndarray</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/contrib/index.html">ndarray.contrib</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/image/index.html">ndarray.image</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/linalg/index.html">ndarray.linalg</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/op/index.html">ndarray.op</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/random/index.html">ndarray.random</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/register/index.html">ndarray.register</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/sparse/index.html">ndarray.sparse</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/utils/index.html">ndarray.utils</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/recordio/index.html">mxnet.recordio</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/symbol/index.html">mxnet.symbol</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/symbol.html">symbol</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/contrib/index.html">symbol.contrib</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/image/index.html">symbol.image</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/linalg/index.html">symbol.linalg</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/op/index.html">symbol.op</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/random/index.html">symbol.random</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/register/index.html">symbol.register</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/sparse/index.html">symbol.sparse</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/visualization/index.html">mxnet.visualization</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/device/index.html">mxnet.device</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/engine/index.html">mxnet.engine</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/executor/index.html">mxnet.executor</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore_server/index.html">mxnet.kvstore_server</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/profiler/index.html">mxnet.profiler</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/rtc/index.html">mxnet.rtc</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/runtime/index.html">mxnet.runtime</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/runtime/generated/mxnet.runtime.Feature.html">mxnet.runtime.Feature</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/runtime/generated/mxnet.runtime.Features.html">mxnet.runtime.Features</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/runtime/generated/mxnet.runtime.feature_list.html">mxnet.runtime.feature_list</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/test_utils/index.html">mxnet.test_utils</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/util/index.html">mxnet.util</a></li>
</ul>
</li>
</ul>
</nav>
</div>
</header>
<main class="mdl-layout__content" tabIndex="0">
<header class="mdl-layout__drawer">
<div class="globaltoc">
<span class="mdl-layout-title toc">Table Of Contents</span>
<nav class="mdl-navigation">
<ul>
<li class="toctree-l1"><a class="reference internal" href="../../../tutorials/index.html">Python Tutorials</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/getting-started/index.html">Getting Started</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/index.html">Crash Course</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/0-introduction.html">Introduction</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/1-nparray.html">Step 1: Manipulate data with NP on MXNet</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/2-create-nn.html">Step 2: Create a neural network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/3-autograd.html">Step 3: Automatic differentiation with autograd</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/4-components.html">Step 4: Necessary components that are not in the network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/5-datasets.html">Step 5: <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s and <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/5-datasets.html#Using-own-data-with-included-Datasets">Using own data with included <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/5-datasets.html#Using-your-own-data-with-custom-Datasets">Using your own data with custom <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/5-datasets.html#New-in-MXNet-2.0:-faster-C++-backend-dataloaders">New in MXNet 2.0: faster C++ backend dataloaders</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/6-train-nn.html">Step 6: Train a Neural Network</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/crash-course/7-use-gpus.html">Step 7: Load and Run a NN using GPU</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/to-mxnet/index.html">Moving to MXNet from Other Frameworks</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/getting-started/to-mxnet/pytorch.html">PyTorch vs Apache MXNet</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/gluon_from_experiment_to_deployment.html">Gluon: from experiment to deployment</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/gluon_migration_guide.html">Gluon2.0: Migration Guide</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/getting-started/logistic_regression_explained.html">Logistic regression explained</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/image/mnist.html">MNIST</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/packages/index.html">Packages</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/autograd/index.html">Automatic Differentiation</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/gluon/index.html">Gluon</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/index.html">Blocks</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/custom-layer.html">Custom Layers</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/hybridize.html">Hybridize</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/init.html">Initialization</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/naming.html">Parameter and Block Naming</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/nn.html">Layers and Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/parameters.html">Parameter Management</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/save_load_params.html">Saving and Loading Gluon Models</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/blocks/activations/activations.html">Activation Blocks</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/data/index.html">Data Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/data_augmentation.html">Image Augmentation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html">Gluon <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s and <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Using-own-data-with-included-Datasets">Using own data with included <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Using-own-data-with-custom-Datasets">Using own data with custom <code class="docutils literal notranslate"><span class="pre">Dataset</span></code>s</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/data/datasets.html#Appendix:-Upgrading-from-Module-DataIter-to-Gluon-DataLoader">Appendix: Upgrading from Module <code class="docutils literal notranslate"><span class="pre">DataIter</span></code> to Gluon <code class="docutils literal notranslate"><span class="pre">DataLoader</span></code></a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/image/index.html">Image Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/info_gan.html">Image similarity search with InfoGAN</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/image/mnist.html">Handwritten Digit Recognition</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/index.html">Losses</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/custom-loss.html">Custom Loss Blocks</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/kl_divergence.html">Kullback-Leibler (KL) Divergence</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/loss/loss.html">Loss functions</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/text/index.html">Text Tutorials</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/text/gnmt.html">Google Neural Machine Translation</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/text/transformer.html">Machine Translation with Transformer</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/gluon/training/index.html">Training</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/fit_api_tutorial.html">MXNet Gluon Fit API</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/trainer.html">Trainer</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/index.html">Learning Rates</a><ul>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_finder.html">Learning Rate Finder</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_schedules.html">Learning Rate Schedules</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/gluon/training/learning_rates/learning_rate_schedules_advanced.html">Advanced Learning Rate Schedules</a></li>
</ul>
</li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/gluon/training/normalization/index.html">Normalization Blocks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/kvstore/index.html">KVStore</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/kvstore/kvstore.html">Distributed Key-Value Store</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/legacy/index.html">Legacy</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/index.html">NDArray</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/01-ndarray-intro.html">An Intro: Manipulate Data the MXNet Way with NDArray</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/02-ndarray-operations.html">NDArray Operations</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/03-ndarray-contexts.html">NDArray Contexts</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/gotchas_numpy_in_mxnet.html">Gotchas using NumPy in Apache MXNet</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/sparse/index.html">Tutorials</a><ul>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/sparse/csr.html">CSRNDArray - NDArray in Compressed Sparse Row Storage Format</a></li>
<li class="toctree-l6"><a class="reference internal" href="../../../tutorials/packages/legacy/ndarray/sparse/row_sparse.html">RowSparseNDArray - NDArray for Sparse Gradient Updates</a></li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/np/index.html">What is NP on MXNet</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/np/cheat-sheet.html">The NP on MXNet cheat sheet</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/np/np-vs-numpy.html">Differences between NP on MXNet and NumPy</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/onnx/index.html">ONNX</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/fine_tuning_gluon.html">Fine-tuning an ONNX model</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/packages/onnx/inference_on_onnx_model.html">Running inference on MXNet/Gluon from an ONNX model</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/deploy/export/onnx.html">Export ONNX Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/optimizer/index.html">Optimizers</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/packages/viz/index.html">Visualization</a><ul>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/visualize_graph">Visualize networks</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/performance/index.html">Performance</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/performance/compression/index.html">Compression</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/compression/int8.html">Deploy with int-8</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/float16">Float16</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/faq/gradient_compression">Gradient Compression</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/int8_inference.html">GluonCV with Quantized Models</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/performance/backend/index.html">Accelerated Backend Tools</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/dnnl/index.html">oneDNN</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/dnnl/dnnl_readme.html">Install MXNet with oneDNN</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/dnnl/dnnl_quantization.html">oneDNN Quantization</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../tutorials/performance/backend/dnnl/dnnl_quantization_inc.html">Improving accuracy with Intel® Neural Compressor</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/tvm.html">Use TVM</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/profiler.html">Profiling MXNet Models</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/performance/backend/amp.html">Using AMP: Automatic Mixed Precision</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/deploy/index.html">Deployment</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/export/index.html">Export</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/export/onnx.html">Exporting to ONNX format</a></li>
<li class="toctree-l4"><a class="reference external" href="https://gluon-cv.mxnet.io/build/examples_deployment/export_network.html">Export Gluon CV Models</a></li>
<li class="toctree-l4"><a class="reference external" href="https://mxnet.apache.org/api/python/docs/tutorials/packages/gluon/blocks/save_load_params.html">Save / Load Parameters</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/inference/index.html">Inference</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/cpp.html">Deploy into C++</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/inference/image_classification_jetson.html">Image Classication using pretrained ResNet-50 model on Jetson module</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/index.html">Run on AWS</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/use_ec2.html">Run on an EC2 Instance</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/use_sagemaker.html">Run on Amazon SageMaker</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../tutorials/deploy/run-on-aws/cloud.html">MXNet on the Cloud</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../tutorials/extend/index.html">Extend</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../tutorials/extend/customop.html">Custom Numpy Operators</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/new_op">New Operator Creation</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/add_op_in_backend">New Operator in MXNet Backend</a></li>
<li class="toctree-l3"><a class="reference external" href="https://mxnet.apache.org/api/faq/using_rtc">Using RTC for CUDA kernels</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../../api/index.html">Python API</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../../api/np/index.html">mxnet.np</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/np/arrays.html">Array objects</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/arrays.ndarray.html">The N-dimensional array (<code class="xref py py-class docutils literal notranslate"><span class="pre">ndarray</span></code>)</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/arrays.indexing.html">Indexing</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/np/routines.html">Routines</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.array-creation.html">Array creation routines</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.eye.html">mxnet.np.eye</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.empty.html">mxnet.np.empty</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.full.html">mxnet.np.full</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.identity.html">mxnet.np.identity</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ones.html">mxnet.np.ones</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ones_like.html">mxnet.np.ones_like</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.zeros.html">mxnet.np.zeros</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.zeros_like.html">mxnet.np.zeros_like</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.array.html">mxnet.np.array</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.copy.html">mxnet.np.copy</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arange.html">mxnet.np.arange</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linspace.html">mxnet.np.linspace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.logspace.html">mxnet.np.logspace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.meshgrid.html">mxnet.np.meshgrid</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.tril.html">mxnet.np.tril</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.array-manipulation.html">Array manipulation routines</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.reshape.html">mxnet.np.reshape</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ravel.html">mxnet.np.ravel</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ndarray.flatten.html">mxnet.np.ndarray.flatten</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.swapaxes.html">mxnet.np.swapaxes</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ndarray.T.html">mxnet.np.ndarray.T</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.transpose.html">mxnet.np.transpose</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.moveaxis.html">mxnet.np.moveaxis</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.rollaxis.html">mxnet.np.rollaxis</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.expand_dims.html">mxnet.np.expand_dims</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.squeeze.html">mxnet.np.squeeze</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.broadcast_to.html">mxnet.np.broadcast_to</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.broadcast_arrays.html">mxnet.np.broadcast_arrays</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.atleast_1d.html">mxnet.np.atleast_1d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.atleast_2d.html">mxnet.np.atleast_2d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.atleast_3d.html">mxnet.np.atleast_3d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.concatenate.html">mxnet.np.concatenate</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.stack.html">mxnet.np.stack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.dstack.html">mxnet.np.dstack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.vstack.html">mxnet.np.vstack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.column_stack.html">mxnet.np.column_stack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.hstack.html">mxnet.np.hstack</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.split.html">mxnet.np.split</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.hsplit.html">mxnet.np.hsplit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.vsplit.html">mxnet.np.vsplit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.array_split.html">mxnet.np.array_split</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.dsplit.html">mxnet.np.dsplit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.tile.html">mxnet.np.tile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.repeat.html">mxnet.np.repeat</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.unique.html">mxnet.np.unique</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.delete.html">mxnet.np.delete</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.insert.html">mxnet.np.insert</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.append.html">mxnet.np.append</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.resize.html">mxnet.np.resize</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.trim_zeros.html">mxnet.np.trim_zeros</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.reshape.html">mxnet.np.reshape</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.flip.html">mxnet.np.flip</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.roll.html">mxnet.np.roll</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.rot90.html">mxnet.np.rot90</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.fliplr.html">mxnet.np.fliplr</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.flipud.html">mxnet.np.flipud</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.io.html">Input and output</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.genfromtxt.html">mxnet.np.genfromtxt</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ndarray.tolist.html">mxnet.np.ndarray.tolist</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.set_printoptions.html">mxnet.np.set_printoptions</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.linalg.html">Linear algebra (<code class="xref py py-mod docutils literal notranslate"><span class="pre">numpy.linalg</span></code>)</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.dot.html">mxnet.np.dot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.vdot.html">mxnet.np.vdot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.inner.html">mxnet.np.inner</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.outer.html">mxnet.np.outer</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.tensordot.html">mxnet.np.tensordot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.einsum.html">mxnet.np.einsum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.multi_dot.html">mxnet.np.linalg.multi_dot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.matmul.html">mxnet.np.matmul</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.matrix_power.html">mxnet.np.linalg.matrix_power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.kron.html">mxnet.np.kron</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.svd.html">mxnet.np.linalg.svd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.cholesky.html">mxnet.np.linalg.cholesky</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.qr.html">mxnet.np.linalg.qr</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.eig.html">mxnet.np.linalg.eig</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.eigh.html">mxnet.np.linalg.eigh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.eigvals.html">mxnet.np.linalg.eigvals</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.eigvalsh.html">mxnet.np.linalg.eigvalsh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.norm.html">mxnet.np.linalg.norm</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.trace.html">mxnet.np.trace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.cond.html">mxnet.np.linalg.cond</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.det.html">mxnet.np.linalg.det</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.matrix_rank.html">mxnet.np.linalg.matrix_rank</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.slogdet.html">mxnet.np.linalg.slogdet</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.solve.html">mxnet.np.linalg.solve</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.tensorsolve.html">mxnet.np.linalg.tensorsolve</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.lstsq.html">mxnet.np.linalg.lstsq</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.inv.html">mxnet.np.linalg.inv</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.pinv.html">mxnet.np.linalg.pinv</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.linalg.tensorinv.html">mxnet.np.linalg.tensorinv</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.math.html">Mathematical functions</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.sin.html">mxnet.np.sin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cos.html">mxnet.np.cos</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.tan.html">mxnet.np.tan</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arcsin.html">mxnet.np.arcsin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arccos.html">mxnet.np.arccos</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arctan.html">mxnet.np.arctan</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.degrees.html">mxnet.np.degrees</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.radians.html">mxnet.np.radians</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.hypot.html">mxnet.np.hypot</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arctan2.html">mxnet.np.arctan2</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.deg2rad.html">mxnet.np.deg2rad</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.rad2deg.html">mxnet.np.rad2deg</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.unwrap.html">mxnet.np.unwrap</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.sinh.html">mxnet.np.sinh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cosh.html">mxnet.np.cosh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.tanh.html">mxnet.np.tanh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arcsinh.html">mxnet.np.arcsinh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arccosh.html">mxnet.np.arccosh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.arctanh.html">mxnet.np.arctanh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.rint.html">mxnet.np.rint</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.fix.html">mxnet.np.fix</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.floor.html">mxnet.np.floor</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ceil.html">mxnet.np.ceil</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.trunc.html">mxnet.np.trunc</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.around.html">mxnet.np.around</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.round_.html">mxnet.np.round_</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.sum.html">mxnet.np.sum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.prod.html">mxnet.np.prod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cumsum.html">mxnet.np.cumsum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanprod.html">mxnet.np.nanprod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nansum.html">mxnet.np.nansum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cumprod.html">mxnet.np.cumprod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nancumprod.html">mxnet.np.nancumprod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nancumsum.html">mxnet.np.nancumsum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.diff.html">mxnet.np.diff</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ediff1d.html">mxnet.np.ediff1d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cross.html">mxnet.np.cross</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.trapz.html">mxnet.np.trapz</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.exp.html">mxnet.np.exp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.expm1.html">mxnet.np.expm1</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.log.html">mxnet.np.log</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.log10.html">mxnet.np.log10</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.log2.html">mxnet.np.log2</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.log1p.html">mxnet.np.log1p</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.logaddexp.html">mxnet.np.logaddexp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.i0.html">mxnet.np.i0</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ldexp.html">mxnet.np.ldexp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.signbit.html">mxnet.np.signbit</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.copysign.html">mxnet.np.copysign</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.frexp.html">mxnet.np.frexp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.spacing.html">mxnet.np.spacing</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.lcm.html">mxnet.np.lcm</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.gcd.html">mxnet.np.gcd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.add.html">mxnet.np.add</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.reciprocal.html">mxnet.np.reciprocal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.negative.html">mxnet.np.negative</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.divide.html">mxnet.np.divide</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.power.html">mxnet.np.power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.subtract.html">mxnet.np.subtract</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.mod.html">mxnet.np.mod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.multiply.html">mxnet.np.multiply</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.true_divide.html">mxnet.np.true_divide</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.remainder.html">mxnet.np.remainder</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.positive.html">mxnet.np.positive</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.float_power.html">mxnet.np.float_power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.fmod.html">mxnet.np.fmod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.modf.html">mxnet.np.modf</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.divmod.html">mxnet.np.divmod</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.floor_divide.html">mxnet.np.floor_divide</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.clip.html">mxnet.np.clip</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.sqrt.html">mxnet.np.sqrt</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cbrt.html">mxnet.np.cbrt</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.square.html">mxnet.np.square</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.absolute.html">mxnet.np.absolute</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.sign.html">mxnet.np.sign</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.maximum.html">mxnet.np.maximum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.minimum.html">mxnet.np.minimum</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.fabs.html">mxnet.np.fabs</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.heaviside.html">mxnet.np.heaviside</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.fmax.html">mxnet.np.fmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.fmin.html">mxnet.np.fmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nan_to_num.html">mxnet.np.nan_to_num</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.interp.html">mxnet.np.interp</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/random/index.html">np.random</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.choice.html">mxnet.np.random.choice</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.shuffle.html">mxnet.np.random.shuffle</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.normal.html">mxnet.np.random.normal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.uniform.html">mxnet.np.random.uniform</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.rand.html">mxnet.np.random.rand</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.randint.html">mxnet.np.random.randint</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.beta.html">mxnet.np.random.beta</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.chisquare.html">mxnet.np.random.chisquare</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.exponential.html">mxnet.np.random.exponential</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.f.html">mxnet.np.random.f</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.gamma.html">mxnet.np.random.gamma</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.gumbel.html">mxnet.np.random.gumbel</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.laplace.html">mxnet.np.random.laplace</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.logistic.html">mxnet.np.random.logistic</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.lognormal.html">mxnet.np.random.lognormal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.multinomial.html">mxnet.np.random.multinomial</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.multivariate_normal.html">mxnet.np.random.multivariate_normal</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.pareto.html">mxnet.np.random.pareto</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.power.html">mxnet.np.random.power</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.rayleigh.html">mxnet.np.random.rayleigh</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/random/generated/mxnet.np.random.weibull.html">mxnet.np.random.weibull</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.sort.html">Sorting, searching, and counting</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ndarray.sort.html">mxnet.np.ndarray.sort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.sort.html">mxnet.np.sort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.lexsort.html">mxnet.np.lexsort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.argsort.html">mxnet.np.argsort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.msort.html">mxnet.np.msort</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.partition.html">mxnet.np.partition</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.argpartition.html">mxnet.np.argpartition</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.argmax.html">mxnet.np.argmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.argmin.html">mxnet.np.argmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanargmax.html">mxnet.np.nanargmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanargmin.html">mxnet.np.nanargmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.argwhere.html">mxnet.np.argwhere</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nonzero.html">mxnet.np.nonzero</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.flatnonzero.html">mxnet.np.flatnonzero</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.where.html">mxnet.np.where</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.searchsorted.html">mxnet.np.searchsorted</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.extract.html">mxnet.np.extract</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.count_nonzero.html">mxnet.np.count_nonzero</a></li>
</ul>
</li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/np/routines.statistics.html">Statistics</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.min.html">mxnet.np.min</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.max.html">mxnet.np.max</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.amin.html">mxnet.np.amin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.amax.html">mxnet.np.amax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanmin.html">mxnet.np.nanmin</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanmax.html">mxnet.np.nanmax</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.ptp.html">mxnet.np.ptp</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.percentile.html">mxnet.np.percentile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanpercentile.html">mxnet.np.nanpercentile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.quantile.html">mxnet.np.quantile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanquantile.html">mxnet.np.nanquantile</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.mean.html">mxnet.np.mean</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.std.html">mxnet.np.std</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.var.html">mxnet.np.var</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.median.html">mxnet.np.median</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.average.html">mxnet.np.average</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanmedian.html">mxnet.np.nanmedian</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanstd.html">mxnet.np.nanstd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.nanvar.html">mxnet.np.nanvar</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.corrcoef.html">mxnet.np.corrcoef</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.correlate.html">mxnet.np.correlate</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.cov.html">mxnet.np.cov</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.histogram.html">mxnet.np.histogram</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.histogram2d.html">mxnet.np.histogram2d</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.histogramdd.html">mxnet.np.histogramdd</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.bincount.html">mxnet.np.bincount</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.histogram_bin_edges.html">mxnet.np.histogram_bin_edges</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/np/generated/mxnet.np.digitize.html">mxnet.np.digitize</a></li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/npx/index.html">NPX: NumPy Neural Network Extension</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.set_np.html">mxnet.npx.set_np</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.reset_np.html">mxnet.npx.reset_np</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.cpu.html">mxnet.npx.cpu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.cpu_pinned.html">mxnet.npx.cpu_pinned</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.gpu.html">mxnet.npx.gpu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.gpu_memory_info.html">mxnet.npx.gpu_memory_info</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.current_device.html">mxnet.npx.current_device</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.num_gpus.html">mxnet.npx.num_gpus</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.activation.html">mxnet.npx.activation</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.batch_norm.html">mxnet.npx.batch_norm</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.convolution.html">mxnet.npx.convolution</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.dropout.html">mxnet.npx.dropout</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.embedding.html">mxnet.npx.embedding</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.fully_connected.html">mxnet.npx.fully_connected</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.layer_norm.html">mxnet.npx.layer_norm</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.pooling.html">mxnet.npx.pooling</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.rnn.html">mxnet.npx.rnn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.leaky_relu.html">mxnet.npx.leaky_relu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.multibox_detection.html">mxnet.npx.multibox_detection</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.multibox_prior.html">mxnet.npx.multibox_prior</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.multibox_target.html">mxnet.npx.multibox_target</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.roi_pooling.html">mxnet.npx.roi_pooling</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.sigmoid.html">mxnet.npx.sigmoid</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.relu.html">mxnet.npx.relu</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.smooth_l1.html">mxnet.npx.smooth_l1</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.softmax.html">mxnet.npx.softmax</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.log_softmax.html">mxnet.npx.log_softmax</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.topk.html">mxnet.npx.topk</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.waitall.html">mxnet.npx.waitall</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.load.html">mxnet.npx.load</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.save.html">mxnet.npx.save</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.one_hot.html">mxnet.npx.one_hot</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.pick.html">mxnet.npx.pick</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.reshape_like.html">mxnet.npx.reshape_like</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.batch_flatten.html">mxnet.npx.batch_flatten</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.batch_dot.html">mxnet.npx.batch_dot</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.gamma.html">mxnet.npx.gamma</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/npx/generated/mxnet.npx.sequence_mask.html">mxnet.npx.sequence_mask</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/gluon/index.html">mxnet.gluon</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/block.html">gluon.Block</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/hybrid_block.html">gluon.HybridBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/symbol_block.html">gluon.SymbolBlock</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/constant.html">gluon.Constant</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/parameter.html">gluon.Parameter</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/trainer.html">gluon.Trainer</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/contrib/index.html">gluon.contrib</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/data/index.html">gluon.data</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/gluon/data/vision/index.html">data.vision</a><ul>
<li class="toctree-l5"><a class="reference internal" href="../../../api/gluon/data/vision/datasets/index.html">vision.datasets</a></li>
<li class="toctree-l5"><a class="reference internal" href="../../../api/gluon/data/vision/transforms/index.html">vision.transforms</a></li>
</ul>
</li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/loss/index.html">gluon.loss</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/metric/index.html">gluon.metric</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/model_zoo/index.html">gluon.model_zoo.vision</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/nn/index.html">gluon.nn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/rnn/index.html">gluon.rnn</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/gluon/utils/index.html">gluon.utils</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/autograd/index.html">mxnet.autograd</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/initializer/index.html">mxnet.initializer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/optimizer/index.html">mxnet.optimizer</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/lr_scheduler/index.html">mxnet.lr_scheduler</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore/index.html">KVStore: Communication for Distributed Training</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore/index.html#horovod">Horovod</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/kvstore/generated/mxnet.kvstore.Horovod.html">mxnet.kvstore.Horovod</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore/index.html#byteps">BytePS</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/kvstore/generated/mxnet.kvstore.BytePS.html">mxnet.kvstore.BytePS</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore/index.html#kvstore-interface">KVStore Interface</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/kvstore/generated/mxnet.kvstore.KVStore.html">mxnet.kvstore.KVStore</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/kvstore/generated/mxnet.kvstore.KVStoreBase.html">mxnet.kvstore.KVStoreBase</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/kvstore/generated/mxnet.kvstore.KVStoreServer.html">mxnet.kvstore.KVStoreServer</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/contrib/index.html">mxnet.contrib</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/io/index.html">contrib.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/ndarray/index.html">contrib.ndarray</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/onnx/index.html">contrib.onnx</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/quantization/index.html">contrib.quantization</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/symbol/index.html">contrib.symbol</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/tensorboard/index.html">contrib.tensorboard</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/tensorrt/index.html">contrib.tensorrt</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/contrib/text/index.html">contrib.text</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/legacy/index.html">Legacy</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/callback/index.html">mxnet.callback</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/image/index.html">mxnet.image</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/io/index.html">mxnet.io</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/ndarray/index.html">mxnet.ndarray</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/ndarray.html">ndarray</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/contrib/index.html">ndarray.contrib</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/image/index.html">ndarray.image</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/linalg/index.html">ndarray.linalg</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/op/index.html">ndarray.op</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/random/index.html">ndarray.random</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/register/index.html">ndarray.register</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/sparse/index.html">ndarray.sparse</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/ndarray/utils/index.html">ndarray.utils</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/recordio/index.html">mxnet.recordio</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/symbol/index.html">mxnet.symbol</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/symbol.html">symbol</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/contrib/index.html">symbol.contrib</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/image/index.html">symbol.image</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/linalg/index.html">symbol.linalg</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/op/index.html">symbol.op</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/random/index.html">symbol.random</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/register/index.html">symbol.register</a></li>
<li class="toctree-l4"><a class="reference internal" href="../../../api/legacy/symbol/sparse/index.html">symbol.sparse</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/legacy/visualization/index.html">mxnet.visualization</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/device/index.html">mxnet.device</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/engine/index.html">mxnet.engine</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/executor/index.html">mxnet.executor</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/kvstore_server/index.html">mxnet.kvstore_server</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/profiler/index.html">mxnet.profiler</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/rtc/index.html">mxnet.rtc</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/runtime/index.html">mxnet.runtime</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../../api/runtime/generated/mxnet.runtime.Feature.html">mxnet.runtime.Feature</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/runtime/generated/mxnet.runtime.Features.html">mxnet.runtime.Features</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../../api/runtime/generated/mxnet.runtime.feature_list.html">mxnet.runtime.feature_list</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/test_utils/index.html">mxnet.test_utils</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../../api/util/index.html">mxnet.util</a></li>
</ul>
</li>
</ul>
</nav>
</div>
</header>
<div class="document">
<div class="page-content" role="main">
<h1>Source code for mxnet.ndarray.ndarray</h1><div class="highlight"><pre>
<span></span><span class="c1"># Licensed to the Apache Software Foundation (ASF) under one</span>
<span class="c1"># or more contributor license agreements. See the NOTICE file</span>
<span class="c1"># distributed with this work for additional information</span>
<span class="c1"># regarding copyright ownership. The ASF licenses this file</span>
<span class="c1"># to you under the Apache License, Version 2.0 (the</span>
<span class="c1"># &quot;License&quot;); you may not use this file except in compliance</span>
<span class="c1"># with the License. You may obtain a copy of the License at</span>
<span class="c1">#</span>
<span class="c1"># http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="c1">#</span>
<span class="c1"># Unless required by applicable law or agreed to in writing,</span>
<span class="c1"># software distributed under the License is distributed on an</span>
<span class="c1"># &quot;AS IS&quot; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY</span>
<span class="c1"># KIND, either express or implied. See the License for the</span>
<span class="c1"># specific language governing permissions and limitations</span>
<span class="c1"># under the License.</span>
<span class="c1"># coding: utf-8</span>
<span class="c1"># pylint: disable=too-many-lines, protected-access</span>
<span class="c1"># pylint: disable=import-error, no-name-in-module, undefined-variable</span>
<span class="sd">&quot;&quot;&quot;NDArray API of MXNet.&quot;&quot;&quot;</span>
<span class="k">try</span><span class="p">:</span>
<span class="kn">from</span> <span class="nn">__builtin__</span> <span class="kn">import</span> <span class="nb">slice</span> <span class="k">as</span> <span class="n">py_slice</span>
<span class="k">except</span> <span class="ne">ImportError</span><span class="p">:</span>
<span class="kn">from</span> <span class="nn">builtins</span> <span class="kn">import</span> <span class="nb">slice</span> <span class="k">as</span> <span class="n">py_slice</span>
<span class="kn">from</span> <span class="nn">array</span> <span class="kn">import</span> <span class="n">array</span> <span class="k">as</span> <span class="n">native_array</span>
<span class="kn">import</span> <span class="nn">ctypes</span>
<span class="kn">import</span> <span class="nn">warnings</span>
<span class="kn">import</span> <span class="nn">operator</span>
<span class="kn">from</span> <span class="nn">functools</span> <span class="kn">import</span> <span class="n">reduce</span> <span class="c1"># pylint: disable=redefined-builtin</span>
<span class="kn">import</span> <span class="nn">numpy</span> <span class="k">as</span> <span class="nn">np</span>
<span class="kn">from</span> <span class="nn">..base</span> <span class="kn">import</span> <span class="n">_LIB</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">,</span> <span class="n">integer_types</span>
<span class="kn">from</span> <span class="nn">..base</span> <span class="kn">import</span> <span class="n">c_array</span><span class="p">,</span> <span class="n">c_array_buf</span><span class="p">,</span> <span class="n">c_handle_array</span><span class="p">,</span> <span class="n">mx_real_t</span>
<span class="kn">from</span> <span class="nn">..base</span> <span class="kn">import</span> <span class="n">mx_uint</span><span class="p">,</span> <span class="n">NDArrayHandle</span><span class="p">,</span> <span class="n">check_call</span><span class="p">,</span> <span class="n">mx_int</span><span class="p">,</span> <span class="n">mx_int64</span>
<span class="kn">from</span> <span class="nn">..base</span> <span class="kn">import</span> <span class="n">ctypes2buffer</span>
<span class="kn">from</span> <span class="nn">..dlpack</span> <span class="kn">import</span> <span class="n">ndarray_to_dlpack_for_read</span><span class="p">,</span> <span class="n">ndarray_to_dlpack_for_write</span>
<span class="kn">from</span> <span class="nn">..dlpack</span> <span class="kn">import</span> <span class="n">ndarray_from_dlpack</span><span class="p">,</span> <span class="n">ndarray_from_numpy</span>
<span class="kn">from</span> <span class="nn">..runtime</span> <span class="kn">import</span> <span class="n">Features</span>
<span class="kn">from</span> <span class="nn">..device</span> <span class="kn">import</span> <span class="n">Device</span><span class="p">,</span> <span class="n">current_device</span>
<span class="kn">from</span> <span class="nn">..util</span> <span class="kn">import</span> <span class="n">is_np_array</span>
<span class="kn">from</span> <span class="nn">.</span> <span class="kn">import</span> <span class="n">_internal</span>
<span class="kn">from</span> <span class="nn">.</span> <span class="kn">import</span> <span class="n">op</span>
<span class="kn">from</span> <span class="nn">._internal</span> <span class="kn">import</span> <span class="n">NDArrayBase</span>
<span class="n">__all__</span> <span class="o">=</span> <span class="p">[</span><span class="s2">&quot;NDArray&quot;</span><span class="p">,</span> <span class="s2">&quot;concatenate&quot;</span><span class="p">,</span> <span class="s2">&quot;dtype_np_to_mx&quot;</span><span class="p">,</span> <span class="s2">&quot;dtype_mx_to_np&quot;</span><span class="p">,</span> <span class="s2">&quot;_GRAD_REQ_MAP&quot;</span><span class="p">,</span>
<span class="s2">&quot;ones&quot;</span><span class="p">,</span> <span class="s2">&quot;add&quot;</span><span class="p">,</span> <span class="s2">&quot;arange&quot;</span><span class="p">,</span> <span class="s2">&quot;linspace&quot;</span><span class="p">,</span> <span class="s2">&quot;eye&quot;</span><span class="p">,</span> <span class="s2">&quot;divide&quot;</span><span class="p">,</span> <span class="s2">&quot;equal&quot;</span><span class="p">,</span> <span class="s2">&quot;full&quot;</span><span class="p">,</span> <span class="s2">&quot;greater&quot;</span><span class="p">,</span>
<span class="s2">&quot;greater_equal&quot;</span><span class="p">,</span> <span class="s2">&quot;imdecode&quot;</span><span class="p">,</span> <span class="s2">&quot;lesser&quot;</span><span class="p">,</span> <span class="s2">&quot;lesser_equal&quot;</span><span class="p">,</span> <span class="s2">&quot;logical_and&quot;</span><span class="p">,</span> <span class="s2">&quot;logical_or&quot;</span><span class="p">,</span>
<span class="s2">&quot;logical_xor&quot;</span><span class="p">,</span> <span class="s2">&quot;maximum&quot;</span><span class="p">,</span> <span class="s2">&quot;minimum&quot;</span><span class="p">,</span> <span class="s2">&quot;moveaxis&quot;</span><span class="p">,</span> <span class="s2">&quot;modulo&quot;</span><span class="p">,</span> <span class="s2">&quot;multiply&quot;</span><span class="p">,</span> <span class="s2">&quot;not_equal&quot;</span><span class="p">,</span>
<span class="s2">&quot;onehot_encode&quot;</span><span class="p">,</span> <span class="s2">&quot;power&quot;</span><span class="p">,</span> <span class="s2">&quot;subtract&quot;</span><span class="p">,</span> <span class="s2">&quot;true_divide&quot;</span><span class="p">,</span> <span class="s2">&quot;waitall&quot;</span><span class="p">,</span> <span class="s2">&quot;_new_empty_handle&quot;</span><span class="p">,</span>
<span class="s2">&quot;histogram&quot;</span><span class="p">,</span> <span class="s2">&quot;split_v2&quot;</span><span class="p">,</span> <span class="s2">&quot;to_dlpack_for_read&quot;</span><span class="p">,</span> <span class="s2">&quot;to_dlpack_for_write&quot;</span><span class="p">,</span> <span class="s2">&quot;from_dlpack&quot;</span><span class="p">,</span>
<span class="s2">&quot;from_numpy&quot;</span><span class="p">,</span> <span class="s2">&quot;zeros&quot;</span><span class="p">,</span> <span class="s2">&quot;indexing_key_expand_implicit_axes&quot;</span><span class="p">,</span> <span class="s2">&quot;get_indexing_dispatch_code&quot;</span><span class="p">,</span>
<span class="s2">&quot;get_oshape_of_gather_nd_op&quot;</span><span class="p">,</span> <span class="s2">&quot;bfloat16&quot;</span><span class="p">,</span> <span class="s2">&quot;get_dtype_type&quot;</span><span class="p">,</span> <span class="s2">&quot;is_mx_dtype&quot;</span><span class="p">,</span>
<span class="s2">&quot;get_dtype_name&quot;</span><span class="p">]</span>
<span class="n">_STORAGE_TYPE_UNDEFINED</span> <span class="o">=</span> <span class="o">-</span><span class="mi">1</span>
<span class="n">_STORAGE_TYPE_DEFAULT</span> <span class="o">=</span> <span class="mi">0</span>
<span class="n">_STORAGE_TYPE_ROW_SPARSE</span> <span class="o">=</span> <span class="mi">1</span>
<span class="n">_STORAGE_TYPE_CSR</span> <span class="o">=</span> <span class="mi">2</span>
<span class="n">_SIGNED_INT32_UPPER_LIMIT</span> <span class="o">=</span> <span class="p">(</span><span class="mi">2</span><span class="o">**</span><span class="mi">31</span> <span class="o">-</span> <span class="mi">1</span><span class="p">)</span>
<span class="n">bfloat16</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">dtype</span><span class="p">([(</span><span class="s1">&#39;bfloat16&#39;</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">uint16</span><span class="p">)])</span>
<span class="c1"># pylint: disable= no-member</span>
<span class="n">_DTYPE_NP_TO_MX</span> <span class="o">=</span> <span class="p">{</span>
<span class="kc">None</span><span class="p">:</span> <span class="o">-</span><span class="mi">1</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">float32</span><span class="p">:</span> <span class="mi">0</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">float64</span><span class="p">:</span> <span class="mi">1</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">float16</span><span class="p">:</span> <span class="mi">2</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">uint8</span><span class="p">:</span> <span class="mi">3</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">int32</span><span class="p">:</span> <span class="mi">4</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">int8</span><span class="p">:</span> <span class="mi">5</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">int64</span><span class="p">:</span> <span class="mi">6</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">bool_</span><span class="p">:</span> <span class="mi">7</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">int16</span><span class="p">:</span> <span class="mi">8</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">uint16</span> <span class="p">:</span> <span class="mi">9</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">uint32</span> <span class="p">:</span> <span class="mi">10</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">uint64</span> <span class="p">:</span> <span class="mi">11</span><span class="p">,</span>
<span class="n">bfloat16</span><span class="p">:</span> <span class="mi">12</span><span class="p">,</span>
<span class="p">}</span>
<span class="k">def</span> <span class="nf">_register_platform_dependent_mx_dtype</span><span class="p">():</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Register platform dependent types to the fixed size counterparts.&quot;&quot;&quot;</span>
<span class="n">kind_map</span> <span class="o">=</span> <span class="p">{</span><span class="s1">&#39;i&#39;</span><span class="p">:</span> <span class="s1">&#39;int&#39;</span><span class="p">,</span> <span class="s1">&#39;u&#39;</span><span class="p">:</span> <span class="s1">&#39;uint&#39;</span><span class="p">,</span> <span class="s1">&#39;f&#39;</span><span class="p">:</span> <span class="s1">&#39;float&#39;</span><span class="p">}</span>
<span class="k">for</span> <span class="n">np_type</span> <span class="ow">in</span> <span class="p">[</span>
<span class="n">np</span><span class="o">.</span><span class="n">byte</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">ubyte</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">short</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">ushort</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">intc</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">uintc</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">int_</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">uint</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">longlong</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">ulonglong</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">half</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">float16</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">single</span><span class="p">,</span>
<span class="n">np</span><span class="o">.</span><span class="n">double</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">longdouble</span><span class="p">]:</span>
<span class="n">dtype</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">dtype</span><span class="p">(</span><span class="n">np_type</span><span class="p">)</span>
<span class="n">kind</span><span class="p">,</span> <span class="n">size</span> <span class="o">=</span> <span class="n">dtype</span><span class="o">.</span><span class="n">kind</span><span class="p">,</span> <span class="n">dtype</span><span class="o">.</span><span class="n">itemsize</span>
<span class="n">bits</span> <span class="o">=</span> <span class="n">size</span> <span class="o">*</span> <span class="mi">8</span>
<span class="n">fixed_dtype</span> <span class="o">=</span> <span class="nb">getattr</span><span class="p">(</span><span class="n">np</span><span class="p">,</span> <span class="n">kind_map</span><span class="p">[</span><span class="n">kind</span><span class="p">]</span><span class="o">+</span><span class="nb">str</span><span class="p">(</span><span class="n">bits</span><span class="p">))</span>
<span class="k">if</span> <span class="n">fixed_dtype</span> <span class="ow">in</span> <span class="n">_DTYPE_NP_TO_MX</span><span class="p">:</span>
<span class="n">_DTYPE_NP_TO_MX</span><span class="p">[</span><span class="n">np_type</span><span class="p">]</span> <span class="o">=</span> <span class="n">_DTYPE_NP_TO_MX</span><span class="p">[</span><span class="n">fixed_dtype</span><span class="p">]</span>
<span class="n">_register_platform_dependent_mx_dtype</span><span class="p">()</span>
<span class="n">_DTYPE_MX_TO_NP</span> <span class="o">=</span> <span class="p">{</span>
<span class="o">-</span><span class="mi">1</span><span class="p">:</span> <span class="kc">None</span><span class="p">,</span>
<span class="mi">0</span><span class="p">:</span> <span class="n">np</span><span class="o">.</span><span class="n">float32</span><span class="p">,</span>
<span class="mi">1</span><span class="p">:</span> <span class="n">np</span><span class="o">.</span><span class="n">float64</span><span class="p">,</span>
<span class="mi">2</span><span class="p">:</span> <span class="n">np</span><span class="o">.</span><span class="n">float16</span><span class="p">,</span>
<span class="mi">3</span><span class="p">:</span> <span class="n">np</span><span class="o">.</span><span class="n">uint8</span><span class="p">,</span>
<span class="mi">4</span><span class="p">:</span> <span class="n">np</span><span class="o">.</span><span class="n">int32</span><span class="p">,</span>
<span class="mi">5</span><span class="p">:</span> <span class="n">np</span><span class="o">.</span><span class="n">int8</span><span class="p">,</span>
<span class="mi">6</span><span class="p">:</span> <span class="n">np</span><span class="o">.</span><span class="n">int64</span><span class="p">,</span>
<span class="mi">7</span><span class="p">:</span> <span class="n">np</span><span class="o">.</span><span class="n">bool_</span><span class="p">,</span>
<span class="mi">8</span><span class="p">:</span> <span class="n">np</span><span class="o">.</span><span class="n">int16</span><span class="p">,</span>
<span class="mi">9</span><span class="p">:</span> <span class="n">np</span><span class="o">.</span><span class="n">uint16</span><span class="p">,</span>
<span class="mi">10</span><span class="p">:</span> <span class="n">np</span><span class="o">.</span><span class="n">uint32</span><span class="p">,</span>
<span class="mi">11</span><span class="p">:</span> <span class="n">np</span><span class="o">.</span><span class="n">uint64</span><span class="p">,</span>
<span class="mi">12</span><span class="p">:</span> <span class="n">bfloat16</span><span class="p">,</span>
<span class="p">}</span>
<span class="k">def</span> <span class="nf">get_dtype_type</span><span class="p">(</span><span class="n">dtype</span><span class="p">):</span>
<span class="k">if</span> <span class="p">(</span><span class="nb">isinstance</span><span class="p">(</span><span class="n">dtype</span><span class="p">,</span> <span class="nb">str</span><span class="p">)</span> <span class="ow">and</span> <span class="n">dtype</span> <span class="ow">in</span> <span class="n">bfloat16</span><span class="o">.</span><span class="n">names</span><span class="p">)</span> <span class="ow">or</span> <span class="n">np</span><span class="o">.</span><span class="n">dtype</span><span class="p">(</span><span class="n">dtype</span><span class="p">)</span> <span class="o">==</span> <span class="n">bfloat16</span><span class="p">:</span>
<span class="k">return</span> <span class="n">bfloat16</span>
<span class="k">return</span> <span class="n">np</span><span class="o">.</span><span class="n">dtype</span><span class="p">(</span><span class="n">dtype</span><span class="p">)</span><span class="o">.</span><span class="n">type</span>
<span class="k">def</span> <span class="nf">is_mx_dtype</span><span class="p">(</span><span class="n">dtype</span><span class="p">):</span>
<span class="k">return</span> <span class="n">get_dtype_type</span><span class="p">(</span><span class="n">dtype</span><span class="p">)</span> <span class="ow">in</span> <span class="n">_DTYPE_NP_TO_MX</span>
<span class="k">def</span> <span class="nf">get_dtype_name</span><span class="p">(</span><span class="n">dtype</span><span class="p">):</span>
<span class="n">dtype</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">dtype</span><span class="p">(</span><span class="n">get_dtype_type</span><span class="p">(</span><span class="n">dtype</span><span class="p">))</span>
<span class="k">return</span> <span class="n">bfloat16</span><span class="o">.</span><span class="n">names</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="k">if</span> <span class="n">dtype</span> <span class="o">==</span> <span class="n">bfloat16</span> <span class="k">else</span> <span class="n">dtype</span><span class="o">.</span><span class="n">name</span>
<span class="k">def</span> <span class="nf">dtype_np_to_mx</span><span class="p">(</span><span class="n">dtype</span><span class="p">):</span>
<span class="k">if</span> <span class="ow">not</span> <span class="n">is_mx_dtype</span><span class="p">(</span><span class="n">dtype</span><span class="p">):</span>
<span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="s1">&#39;dtype must be one of: &#39;</span> <span class="o">+</span> <span class="nb">str</span><span class="p">(</span><span class="n">_DTYPE_NP_TO_MX</span><span class="p">))</span>
<span class="n">dtype_type</span> <span class="o">=</span> <span class="n">get_dtype_type</span><span class="p">(</span><span class="n">dtype</span><span class="p">)</span>
<span class="k">return</span> <span class="n">_DTYPE_NP_TO_MX</span><span class="p">[</span><span class="n">dtype_type</span><span class="p">]</span>
<span class="k">def</span> <span class="nf">dtype_mx_to_np</span><span class="p">(</span><span class="n">dtype_idx</span><span class="p">):</span>
<span class="k">return</span> <span class="n">_DTYPE_MX_TO_NP</span><span class="p">[</span><span class="n">dtype_idx</span><span class="p">]</span>
<span class="n">_STORAGE_TYPE_STR_TO_ID</span> <span class="o">=</span> <span class="p">{</span>
<span class="s1">&#39;undefined&#39;</span><span class="p">:</span> <span class="n">_STORAGE_TYPE_UNDEFINED</span><span class="p">,</span>
<span class="s1">&#39;default&#39;</span><span class="p">:</span> <span class="n">_STORAGE_TYPE_DEFAULT</span><span class="p">,</span>
<span class="s1">&#39;row_sparse&#39;</span><span class="p">:</span> <span class="n">_STORAGE_TYPE_ROW_SPARSE</span><span class="p">,</span>
<span class="s1">&#39;csr&#39;</span><span class="p">:</span> <span class="n">_STORAGE_TYPE_CSR</span><span class="p">,</span>
<span class="p">}</span>
<span class="n">_STORAGE_TYPE_ID_TO_STR</span> <span class="o">=</span> <span class="p">{</span>
<span class="n">_STORAGE_TYPE_UNDEFINED</span><span class="p">:</span> <span class="s1">&#39;undefined&#39;</span><span class="p">,</span>
<span class="n">_STORAGE_TYPE_DEFAULT</span><span class="p">:</span> <span class="s1">&#39;default&#39;</span><span class="p">,</span>
<span class="n">_STORAGE_TYPE_ROW_SPARSE</span><span class="p">:</span> <span class="s1">&#39;row_sparse&#39;</span><span class="p">,</span>
<span class="n">_STORAGE_TYPE_CSR</span><span class="p">:</span> <span class="s1">&#39;csr&#39;</span><span class="p">,</span>
<span class="p">}</span>
<span class="n">_GRAD_REQ_MAP</span> <span class="o">=</span> <span class="p">{</span>
<span class="s1">&#39;null&#39;</span><span class="p">:</span> <span class="mi">0</span><span class="p">,</span>
<span class="s1">&#39;write&#39;</span><span class="p">:</span> <span class="mi">1</span><span class="p">,</span>
<span class="s1">&#39;add&#39;</span><span class="p">:</span> <span class="mi">3</span>
<span class="p">}</span>
<span class="c1"># pylint: enable= no-member</span>
<span class="c1"># Return code for dispatching indexing function call</span>
<span class="n">_NDARRAY_UNSUPPORTED_INDEXING</span> <span class="o">=</span> <span class="o">-</span><span class="mi">1</span>
<span class="n">_NDARRAY_BASIC_INDEXING</span> <span class="o">=</span> <span class="mi">0</span>
<span class="n">_NDARRAY_ADVANCED_INDEXING</span> <span class="o">=</span> <span class="mi">1</span>
<span class="n">_NDARRAY_EMPTY_TUPLE_INDEXING</span> <span class="o">=</span> <span class="mi">2</span>
<span class="c1"># Return code for 0-d boolean array handler</span>
<span class="n">_NDARRAY_NO_ZERO_DIM_BOOL_ARRAY</span> <span class="o">=</span> <span class="o">-</span><span class="mi">1</span>
<span class="n">_NDARRAY_ZERO_DIM_BOOL_ARRAY_FALSE</span> <span class="o">=</span> <span class="mi">0</span>
<span class="n">_NDARRAY_ZERO_DIM_BOOL_ARRAY_TRUE</span> <span class="o">=</span> <span class="mi">1</span>
<span class="c1"># Caching whether MXNet was built with INT64 support or not</span>
<span class="n">_INT64_TENSOR_SIZE_ENABLED</span> <span class="o">=</span> <span class="kc">None</span>
<span class="k">def</span> <span class="nf">_int64_enabled</span><span class="p">():</span>
<span class="k">global</span> <span class="n">_INT64_TENSOR_SIZE_ENABLED</span>
<span class="k">if</span> <span class="n">_INT64_TENSOR_SIZE_ENABLED</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">_INT64_TENSOR_SIZE_ENABLED</span> <span class="o">=</span> <span class="n">Features</span><span class="p">()</span><span class="o">.</span><span class="n">is_enabled</span><span class="p">(</span><span class="s1">&#39;INT64_TENSOR_SIZE&#39;</span><span class="p">)</span>
<span class="k">return</span> <span class="n">_INT64_TENSOR_SIZE_ENABLED</span>
<span class="k">def</span> <span class="nf">_new_empty_handle</span><span class="p">():</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a new empty handle.</span>
<span class="sd"> Empty handle can be used to hold a result.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> handle</span>
<span class="sd"> A new empty `NDArray` handle.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">hdl</span> <span class="o">=</span> <span class="n">NDArrayHandle</span><span class="p">()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayCreateNone</span><span class="p">(</span><span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">hdl</span><span class="p">)))</span>
<span class="k">return</span> <span class="n">hdl</span>
<span class="k">def</span> <span class="nf">_new_alloc_handle</span><span class="p">(</span><span class="n">shape</span><span class="p">,</span> <span class="n">ctx</span><span class="p">,</span> <span class="n">delay_alloc</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">mx_real_t</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Return a new handle with specified shape and context.</span>
<span class="sd"> Empty handle is only used to hold results.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> handle</span>
<span class="sd"> A new empty `NDArray` handle.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">hdl</span> <span class="o">=</span> <span class="n">NDArrayHandle</span><span class="p">()</span>
<span class="k">if</span> <span class="n">_int64_enabled</span><span class="p">():</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayCreate64</span><span class="p">(</span>
<span class="n">c_array_buf</span><span class="p">(</span><span class="n">mx_int64</span><span class="p">,</span> <span class="n">native_array</span><span class="p">(</span><span class="s1">&#39;q&#39;</span><span class="p">,</span> <span class="n">shape</span><span class="p">)),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">shape</span><span class="p">)),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="n">ctx</span><span class="o">.</span><span class="n">device_typeid</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="n">ctx</span><span class="o">.</span><span class="n">device_id</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="nb">int</span><span class="p">(</span><span class="n">delay_alloc</span><span class="p">)),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="nb">int</span><span class="p">(</span><span class="n">dtype_np_to_mx</span><span class="p">(</span><span class="n">dtype</span><span class="p">))),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">hdl</span><span class="p">)))</span>
<span class="k">else</span><span class="p">:</span>
<span class="c1"># When shape is larger than unit32 then there is an overflow error at python end itself.</span>
<span class="c1"># It needs to be caught here since the call doesn&#39;t even reach backend.</span>
<span class="n">size</span> <span class="o">=</span> <span class="mi">1</span>
<span class="k">for</span> <span class="n">idx</span> <span class="ow">in</span> <span class="n">shape</span><span class="p">:</span>
<span class="n">size</span> <span class="o">=</span> <span class="n">size</span> <span class="o">*</span> <span class="n">idx</span>
<span class="k">if</span> <span class="n">size</span> <span class="o">&gt;</span> <span class="n">_SIGNED_INT32_UPPER_LIMIT</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">Exception</span><span class="p">(</span><span class="s2">&quot;[_new_alloc_handle] Size of tensor you are trying to allocate is &quot;</span> <span class="o">+</span>
<span class="s2">&quot;larger than 2^31 elements. Please build with flag &quot;</span> <span class="o">+</span>
<span class="s2">&quot;USE_INT64_TENSOR_SIZE=1&quot;</span><span class="p">)</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayCreate</span><span class="p">(</span>
<span class="n">c_array_buf</span><span class="p">(</span><span class="n">mx_uint</span><span class="p">,</span> <span class="n">native_array</span><span class="p">(</span><span class="s1">&#39;I&#39;</span><span class="p">,</span> <span class="n">shape</span><span class="p">)),</span>
<span class="n">mx_uint</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">shape</span><span class="p">)),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="n">ctx</span><span class="o">.</span><span class="n">device_typeid</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="n">ctx</span><span class="o">.</span><span class="n">device_id</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="nb">int</span><span class="p">(</span><span class="n">delay_alloc</span><span class="p">)),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="nb">int</span><span class="p">(</span><span class="n">dtype_np_to_mx</span><span class="p">(</span><span class="n">dtype</span><span class="p">))),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">hdl</span><span class="p">)))</span>
<span class="k">return</span> <span class="n">hdl</span>
<span class="k">def</span> <span class="nf">_new_from_shared_mem</span><span class="p">(</span><span class="n">shared_pid</span><span class="p">,</span> <span class="n">shared_id</span><span class="p">,</span> <span class="n">shape</span><span class="p">,</span> <span class="n">dtype</span><span class="p">):</span>
<span class="n">hdl</span> <span class="o">=</span> <span class="n">NDArrayHandle</span><span class="p">()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayCreateFromSharedMem</span><span class="p">(</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="n">shared_pid</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="n">shared_id</span><span class="p">),</span>
<span class="n">c_array</span><span class="p">(</span><span class="n">mx_int</span><span class="p">,</span> <span class="n">shape</span><span class="p">),</span>
<span class="n">mx_int</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">shape</span><span class="p">)),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="nb">int</span><span class="p">(</span><span class="n">dtype_np_to_mx</span><span class="p">(</span><span class="n">dtype</span><span class="p">))),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">hdl</span><span class="p">)))</span>
<span class="k">return</span> <span class="n">hdl</span>
<div class="viewcode-block" id="waitall"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.waitall">[docs]</a><span class="k">def</span> <span class="nf">waitall</span><span class="p">():</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Wait for all async operations to finish in MXNet.</span>
<span class="sd"> This function is used for benchmarking only.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If your mxnet code throws an exception, then waitall can cause performance impact.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayWaitAll</span><span class="p">())</span></div>
<span class="k">def</span> <span class="nf">_storage_type</span><span class="p">(</span><span class="n">handle</span><span class="p">):</span>
<span class="n">storage_type</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayGetStorageType</span><span class="p">(</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">storage_type</span><span class="p">)))</span>
<span class="k">return</span> <span class="n">storage_type</span><span class="o">.</span><span class="n">value</span>
<div class="viewcode-block" id="NDArray"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray">[docs]</a><span class="k">class</span> <span class="nc">NDArray</span><span class="p">(</span><span class="n">NDArrayBase</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;An array object representing a multidimensional, homogeneous array of</span>
<span class="sd">fixed-size items.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="vm">__slots__</span> <span class="o">=</span> <span class="p">[]</span>
<span class="c1"># make numpy functions return NDArray instead of numpy object array</span>
<span class="n">__array_priority__</span> <span class="o">=</span> <span class="mf">1000.0</span>
<span class="c1"># Extension type code for TVM function.</span>
<span class="c1"># See C++ side of definition(kTVMNDArrayTypeCode) at include/mxmet/tensor_blob.h</span>
<span class="n">_tvm_tcode</span> <span class="o">=</span> <span class="mi">19</span>
<span class="c1"># pylint: disable= no-member, undefined-variable</span>
<div class="viewcode-block" id="NDArray.as_np_ndarray"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.as_np_ndarray">[docs]</a> <span class="k">def</span> <span class="nf">as_np_ndarray</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convert mxnet.ndarray.NDArray to mxnet.numpy.ndarray.&quot;&quot;&quot;</span>
<span class="n">storage_type</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">stype</span>
<span class="k">if</span> <span class="n">storage_type</span> <span class="o">!=</span> <span class="s1">&#39;default&#39;</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;cannot convert ndarray of stype </span><span class="si">{}</span><span class="s1"> to numpy ndarray&#39;</span>
<span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="nb">str</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">storage_type</span><span class="p">))))</span>
<span class="kn">from</span> <span class="nn">..numpy</span> <span class="kn">import</span> <span class="n">ndarray</span>
<span class="n">hdl</span> <span class="o">=</span> <span class="n">NDArrayHandle</span><span class="p">()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXShallowCopyNDArray</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">hdl</span><span class="p">)))</span>
<span class="k">return</span> <span class="n">ndarray</span><span class="p">(</span><span class="n">handle</span><span class="o">=</span><span class="n">hdl</span><span class="p">,</span> <span class="n">writable</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">writable</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.as_nd_ndarray"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.as_nd_ndarray">[docs]</a> <span class="k">def</span> <span class="nf">as_nd_ndarray</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;A convenience function for creating a classic ndarray from the current</span>
<span class="sd"> ndarray with zero copy. For this class, it just returns itself since it is</span>
<span class="sd"> already a classic ndarray.&quot;&quot;&quot;</span>
<span class="k">return</span> <span class="bp">self</span></div>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">_tvm_handle</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="o">.</span><span class="n">value</span>
<span class="k">def</span> <span class="fm">__repr__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a string representation of the array.&quot;&quot;&quot;</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_alive</span><span class="p">:</span>
<span class="n">shape_info</span> <span class="o">=</span> <span class="s1">&#39;x&#39;</span><span class="o">.</span><span class="n">join</span><span class="p">([</span><span class="sa">f</span><span class="s1">&#39;</span><span class="si">{</span><span class="n">x</span><span class="si">}</span><span class="s1">&#39;</span> <span class="k">for</span> <span class="n">x</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">])</span>
<span class="k">return</span> <span class="sa">f</span><span class="s1">&#39;</span><span class="se">\n</span><span class="si">{</span><span class="nb">str</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">())</span><span class="si">}</span><span class="se">\n</span><span class="s1">&lt;</span><span class="si">{</span><span class="bp">self</span><span class="o">.</span><span class="vm">__class__</span><span class="o">.</span><span class="vm">__name__</span><span class="si">}</span><span class="s1"> </span><span class="si">{</span><span class="n">shape_info</span><span class="si">}</span><span class="s1"> @</span><span class="si">{</span><span class="bp">self</span><span class="o">.</span><span class="n">ctx</span><span class="si">}</span><span class="s1">&gt;&#39;</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="s1">&#39;&lt;FREED </span><span class="si">{}</span><span class="s1">&gt;&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="vm">__class__</span><span class="o">.</span><span class="vm">__name__</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">__reduce__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="k">return</span> <span class="n">NDArray</span><span class="p">,</span> <span class="p">(</span><span class="kc">None</span><span class="p">,),</span> <span class="bp">self</span><span class="o">.</span><span class="n">__getstate__</span><span class="p">()</span>
<span class="k">def</span> <span class="nf">_to_shared_mem</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="n">shared_pid</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">()</span>
<span class="n">shared_id</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayGetSharedMemHandle</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">shared_pid</span><span class="p">),</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">shared_id</span><span class="p">)))</span>
<span class="k">return</span> <span class="n">shared_pid</span><span class="o">.</span><span class="n">value</span><span class="p">,</span> <span class="n">shared_id</span><span class="o">.</span><span class="n">value</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">dtype</span>
<span class="k">def</span> <span class="fm">__abs__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__abs__() &lt;=&gt; abs(x) &lt;=&gt; x.abs() &lt;=&gt; mx.nd.abs(x, y)&quot;&quot;&quot;</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">abs</span><span class="p">()</span>
<span class="k">def</span> <span class="fm">__add__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__add__(y) &lt;=&gt; x+y &lt;=&gt; mx.nd.add(x, y) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">add</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__iadd__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__iadd__(y) &lt;=&gt; x+=y &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="ow">not</span> <span class="bp">self</span><span class="o">.</span><span class="n">writable</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;trying to add to a readonly NDArray&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">NDArray</span><span class="p">):</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">broadcast_add</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">):</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_plus_scalar</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="nb">float</span><span class="p">(</span><span class="n">other</span><span class="p">),</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="sa">f</span><span class="s1">&#39;type </span><span class="si">{</span><span class="nb">str</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">other</span><span class="p">))</span><span class="si">}</span><span class="s1"> not supported&#39;</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__radd__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="fm">__add__</span><span class="p">(</span><span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__sub__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__sub__(y) &lt;=&gt; x-y &lt;=&gt; mx.nd.subtract(x, y) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">subtract</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__isub__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__isub__(y) &lt;=&gt; x-=y &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="ow">not</span> <span class="bp">self</span><span class="o">.</span><span class="n">writable</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;trying to subtract from a readonly NDArray&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">NDArray</span><span class="p">):</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">broadcast_sub</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">):</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_minus_scalar</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="nb">float</span><span class="p">(</span><span class="n">other</span><span class="p">),</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="sa">f</span><span class="s1">&#39;type </span><span class="si">{</span><span class="nb">str</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">other</span><span class="p">))</span><span class="si">}</span><span class="s1"> not supported&#39;</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__rsub__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__rsub__(y) &lt;=&gt; y-x &lt;=&gt; mx.nd.subtract(y, x) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">subtract</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__mul__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__mul__(y) &lt;=&gt; x*y &lt;=&gt; mx.nd.multiply(x, y) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">multiply</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__neg__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__neg__(y) &lt;=&gt; -x &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_mul_scalar</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">-</span><span class="mf">1.0</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__imul__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__imul__(y) &lt;=&gt; x*=y &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="ow">not</span> <span class="bp">self</span><span class="o">.</span><span class="n">writable</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;trying to multiply to a readonly NDArray&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">NDArray</span><span class="p">):</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">broadcast_mul</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">):</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_mul_scalar</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="nb">float</span><span class="p">(</span><span class="n">other</span><span class="p">),</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="sa">f</span><span class="s1">&#39;type </span><span class="si">{</span><span class="nb">str</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">other</span><span class="p">))</span><span class="si">}</span><span class="s1"> not supported&#39;</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__rmul__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="fm">__mul__</span><span class="p">(</span><span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">__div__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__div__(y) &lt;=&gt; x/y &lt;=&gt; mx.nd.divide(x, y) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">divide</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">__rdiv__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__rdiv__(y) &lt;=&gt; y/x &lt;=&gt; mx.nd.divide(y, x) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">divide</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">__idiv__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__rdiv__(y) &lt;=&gt; x/=y &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="ow">not</span> <span class="bp">self</span><span class="o">.</span><span class="n">writable</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;trying to divide from a readonly NDArray&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">NDArray</span><span class="p">):</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">broadcast_div</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">):</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_div_scalar</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="nb">float</span><span class="p">(</span><span class="n">other</span><span class="p">),</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="sa">f</span><span class="s1">&#39;type </span><span class="si">{</span><span class="nb">str</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">other</span><span class="p">))</span><span class="si">}</span><span class="s1"> not supported&#39;</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__truediv__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="k">return</span> <span class="n">divide</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__rtruediv__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="k">return</span> <span class="n">divide</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__itruediv__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">__idiv__</span><span class="p">(</span><span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__mod__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__mod__(y) &lt;=&gt; x%y &lt;=&gt; mx.nd.modulo(x, y) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">modulo</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__rmod__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__rmod__(y) &lt;=&gt; y%x &lt;=&gt; mx.nd.modulo(y, x) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">modulo</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__imod__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__rmod__(y) &lt;=&gt; x%=y &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="ow">not</span> <span class="bp">self</span><span class="o">.</span><span class="n">writable</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;trying to take modulo from a readonly NDArray&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">NDArray</span><span class="p">):</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">broadcast_mod</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">):</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_mod_scalar</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="nb">float</span><span class="p">(</span><span class="n">other</span><span class="p">),</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="sa">f</span><span class="s1">&#39;type </span><span class="si">{</span><span class="nb">str</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">other</span><span class="p">))</span><span class="si">}</span><span class="s1"> not supported&#39;</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__pow__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__pow__(y) &lt;=&gt; x**y &lt;=&gt; mx.nd.power(x,y) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">power</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__rpow__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__pow__(y) &lt;=&gt; y**x &lt;=&gt; mx.nd.power(y,x) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">power</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__eq__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__eq__(y) &lt;=&gt; x==y &lt;=&gt; mx.nd.equal(x, y) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">equal</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__hash__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Default hash function.&quot;&quot;&quot;</span>
<span class="k">return</span> <span class="nb">id</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span><span class="o">//</span><span class="mi">16</span>
<span class="k">def</span> <span class="fm">__ne__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__ne__(y) &lt;=&gt; x!=y &lt;=&gt; mx.nd.not_equal(x, y) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">not_equal</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__gt__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__gt__(y) &lt;=&gt; x&gt;y &lt;=&gt; mx.nd.greater(x, y) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">greater</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__ge__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__ge__(y) &lt;=&gt; x&gt;=y &lt;=&gt; mx.nd.greater_equal(x, y) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">greater_equal</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__lt__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__lt__(y) &lt;=&gt; x&lt;y &lt;=&gt; mx.nd.lesser(x, y) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">lesser</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__le__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__le__(y) &lt;=&gt; x&lt;=y &lt;=&gt; mx.nd.less_equal(x, y) &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">lesser_equal</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">)</span>
<span class="k">def</span> <span class="fm">__bool__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="n">num_elements</span> <span class="o">=</span> <span class="n">reduce</span><span class="p">(</span><span class="n">operator</span><span class="o">.</span><span class="n">mul</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="mi">1</span><span class="p">)</span>
<span class="k">if</span> <span class="n">num_elements</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">return</span> <span class="kc">False</span>
<span class="k">elif</span> <span class="n">num_elements</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="k">return</span> <span class="nb">bool</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">asscalar</span><span class="p">())</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;The truth value of an NDArray with multiple elements &quot;</span> \
<span class="s2">&quot;is ambiguous.&quot;</span><span class="p">)</span>
<span class="n">__nonzero__</span> <span class="o">=</span> <span class="fm">__bool__</span>
<span class="k">def</span> <span class="fm">__str__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a readable string representation of the array.&quot;&quot;&quot;</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">dtype</span> <span class="o">==</span> <span class="n">bfloat16</span><span class="p">:</span>
<span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">NDArray</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="nb">float</span><span class="p">))</span><span class="o">.</span><span class="fm">__str__</span><span class="p">()</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="nb">super</span><span class="p">(</span><span class="n">NDArray</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__str__</span><span class="p">()</span>
<span class="k">def</span> <span class="fm">__len__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Number of element along the first axis.&quot;&quot;&quot;</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
<span class="k">def</span> <span class="nf">__getstate__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="n">handle</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">handle</span>
<span class="n">this</span> <span class="o">=</span> <span class="p">{</span><span class="s1">&#39;handle&#39;</span> <span class="p">:</span> <span class="kc">None</span><span class="p">}</span>
<span class="k">if</span> <span class="n">handle</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">length</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_size_t</span><span class="p">()</span>
<span class="n">cptr</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">POINTER</span><span class="p">(</span><span class="n">ctypes</span><span class="o">.</span><span class="n">c_char</span><span class="p">)()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArraySaveRawBytes</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">length</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">cptr</span><span class="p">)))</span>
<span class="n">this</span><span class="p">[</span><span class="s1">&#39;handle&#39;</span><span class="p">]</span> <span class="o">=</span> <span class="n">ctypes2buffer</span><span class="p">(</span><span class="n">cptr</span><span class="p">,</span> <span class="n">length</span><span class="o">.</span><span class="n">value</span><span class="p">)</span>
<span class="k">return</span> <span class="n">this</span>
<span class="k">def</span> <span class="nf">__setstate__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">state</span><span class="p">):</span>
<span class="c1"># pylint: disable=assigning-non-slot</span>
<span class="n">handle</span> <span class="o">=</span> <span class="n">state</span><span class="p">[</span><span class="s1">&#39;handle&#39;</span><span class="p">]</span>
<span class="k">if</span> <span class="n">handle</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">buf</span> <span class="o">=</span> <span class="n">handle</span>
<span class="n">handle</span> <span class="o">=</span> <span class="n">NDArrayHandle</span><span class="p">()</span>
<span class="n">ptr</span> <span class="o">=</span> <span class="p">(</span><span class="n">ctypes</span><span class="o">.</span><span class="n">c_char</span> <span class="o">*</span> <span class="nb">len</span><span class="p">(</span><span class="n">buf</span><span class="p">))</span><span class="o">.</span><span class="n">from_buffer</span><span class="p">(</span><span class="n">buf</span><span class="p">)</span>
<span class="n">length</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_size_t</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">buf</span><span class="p">))</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayLoadFromRawBytes</span><span class="p">(</span><span class="n">ptr</span><span class="p">,</span> <span class="n">length</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">handle</span><span class="p">)))</span>
<span class="bp">self</span><span class="o">.</span><span class="n">handle</span> <span class="o">=</span> <span class="n">handle</span>
<span class="k">else</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">handle</span> <span class="o">=</span> <span class="kc">None</span>
<span class="k">def</span> <span class="fm">__setitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">key</span><span class="p">,</span> <span class="n">value</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__setitem__(i, y) &lt;=&gt; x[i]=y</span>
<span class="sd"> Sets ``self[key]`` to ``value``.</span>
<span class="sd"> This functions supports advanced indexing as defined in `the NumPy</span>
<span class="sd"> advanced indexing documentation</span>
<span class="sd"> &lt;https://docs.scipy.org/doc/numpy/reference/arrays.indexing.html#advanced-indexing&gt;`_,</span>
<span class="sd"> with the restriction that boolean array indexing is not supported.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> key : int, mxnet.ndarray.slice, list, np.ndarray, NDArray, or tuple of all previous types</span>
<span class="sd"> The indexing key.</span>
<span class="sd"> value : scalar or array-like object that can be broadcast to the shape of self[key]</span>
<span class="sd"> The value to set.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.zeros((2, 3))</span>
<span class="sd"> &gt;&gt;&gt; x[:] = 1</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x[:, 1:2] = 2</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 2., 1.],</span>
<span class="sd"> [ 1., 2., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x[1:2, 1:] = 3</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 2., 1.],</span>
<span class="sd"> [ 1., 3., 3.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x[1:, 0:2] = mx.nd.zeros((1, 2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 2., 1.],</span>
<span class="sd"> [ 0., 0., 3.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x[1, 2] = 4</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 2., 1.],</span>
<span class="sd"> [ 0., 0., 4.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x[[0], [1, 2]] = 5</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 5., 5.],</span>
<span class="sd"> [ 0., 0., 4.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x[::-1, 0:2:2] = [6]</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 6., 5., 5.],</span>
<span class="sd"> [ 6., 0., 4.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">ndim</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="p">(</span><span class="nb">tuple</span><span class="p">,</span> <span class="n">py_slice</span><span class="p">)):</span>
<span class="k">raise</span> <span class="ne">IndexError</span><span class="p">(</span><span class="s1">&#39;scalar tensor can only accept `()` and `:` as index&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">)</span> <span class="ow">and</span> <span class="nb">len</span><span class="p">(</span><span class="n">key</span><span class="p">)</span> <span class="o">!=</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">IndexError</span><span class="p">(</span><span class="s1">&#39;scalar tensor can only accept `()` and `:` as index&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_full</span><span class="p">(</span><span class="n">value</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">NDArray</span><span class="p">)</span> <span class="ow">and</span> <span class="n">value</span><span class="o">.</span><span class="n">size</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="k">if</span> <span class="n">value</span><span class="o">.</span><span class="n">shape</span> <span class="o">!=</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">:</span>
<span class="n">value</span> <span class="o">=</span> <span class="n">value</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span>
<span class="n">value</span><span class="o">.</span><span class="n">copyto</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">ndarray</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">generic</span><span class="p">))</span> <span class="ow">and</span> <span class="n">value</span><span class="o">.</span><span class="n">size</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">generic</span><span class="p">)</span> <span class="ow">or</span> <span class="n">value</span><span class="o">.</span><span class="n">shape</span> <span class="o">!=</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">:</span>
<span class="n">value</span> <span class="o">=</span> <span class="n">value</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_sync_copyfrom</span><span class="p">(</span><span class="n">value</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;setting an array element with a sequence.&#39;</span><span class="p">)</span>
<span class="k">elif</span> <span class="bp">self</span><span class="o">.</span><span class="n">size</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">return</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">key</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">indexing_key_expand_implicit_axes</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span>
<span class="n">slc_key</span> <span class="o">=</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">idx</span> <span class="k">for</span> <span class="n">idx</span> <span class="ow">in</span> <span class="n">key</span> <span class="k">if</span> <span class="n">idx</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">slc_key</span><span class="p">)</span> <span class="o">&lt;</span> <span class="bp">self</span><span class="o">.</span><span class="n">ndim</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span>
<span class="s1">&#39;too few indices after normalization: expected `ndim` (</span><span class="si">{}</span><span class="s1">) &#39;</span>
<span class="s1">&#39;but got </span><span class="si">{}</span><span class="s1">. This is a bug, please report it!&#39;</span>
<span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">ndim</span><span class="p">,</span> <span class="nb">len</span><span class="p">(</span><span class="n">slc_key</span><span class="p">))</span>
<span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">slc_key</span><span class="p">)</span> <span class="o">&gt;</span> <span class="bp">self</span><span class="o">.</span><span class="n">ndim</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">IndexError</span><span class="p">(</span>
<span class="s1">&#39;too many indices (</span><span class="si">{}</span><span class="s1">) for array with </span><span class="si">{}</span><span class="s1"> dimensions&#39;</span>
<span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">slc_key</span><span class="p">),</span> <span class="bp">self</span><span class="o">.</span><span class="n">ndim</span><span class="p">)</span>
<span class="p">)</span>
<span class="n">indexing_dispatch_code</span> <span class="o">=</span> <span class="n">get_indexing_dispatch_code</span><span class="p">(</span><span class="n">slc_key</span><span class="p">)</span>
<span class="k">if</span> <span class="n">indexing_dispatch_code</span> <span class="o">==</span> <span class="n">_NDARRAY_BASIC_INDEXING</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_set_nd_basic_indexing</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="n">value</span><span class="p">)</span>
<span class="k">elif</span> <span class="n">indexing_dispatch_code</span> <span class="o">==</span> <span class="n">_NDARRAY_ADVANCED_INDEXING</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_set_nd_advanced_indexing</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="n">value</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span>
<span class="s1">&#39;Indexing NDArray with index </span><span class="si">{}</span><span class="s1"> of type </span><span class="si">{}</span><span class="s1"> is not supported&#39;</span>
<span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="nb">type</span><span class="p">(</span><span class="n">key</span><span class="p">))</span>
<span class="p">)</span>
<span class="k">def</span> <span class="fm">__getitem__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">key</span><span class="p">):</span> <span class="c1"># pylint: disable=too-many-return-statements</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;x.__getitem__(i) &lt;=&gt; x[i]</span>
<span class="sd"> Returns a sliced view of this array if the elements fetched are contiguous in memory;</span>
<span class="sd"> otherwise, returns a newly created NDArray.</span>
<span class="sd"> This functions supports advanced indexing defined in the following reference with</span>
<span class="sd"> some restrictions.</span>
<span class="sd"> For basic indexing, i.e., if ``key`` consists only of integers,</span>
<span class="sd"> ``slice``, ``Ellipsis`` (``...``) and ``None``, a mutable view is</span>
<span class="sd"> returned that shares memory with this array if the accessed portion is</span>
<span class="sd"> contiguous in memory.</span>
<span class="sd"> Otherwise, a newly created ``NDArray`` is returned.</span>
<span class="sd"> This functions supports advanced indexing as defined in `the NumPy</span>
<span class="sd"> advanced indexing documentation</span>
<span class="sd"> &lt;https://docs.scipy.org/doc/numpy/reference/arrays.indexing.html#advanced-indexing&gt;`_,</span>
<span class="sd"> with the restriction that boolean array indexing is not supported.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> key : int, mxnet.ndarray.slice, list, np.ndarray, NDArray, or tuple of all previous types</span>
<span class="sd"> Indexing key.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> The default is to give explicit indices for all axes:</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(0, 6).reshape((2, 3))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 0., 1., 2.],</span>
<span class="sd"> [ 3., 4., 5.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x[0, :].asnumpy()</span>
<span class="sd"> array([0., 1., 2.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x[0, :2].asnumpy()</span>
<span class="sd"> array([0., 1.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x[:, :-1].asnumpy()</span>
<span class="sd"> array([[0., 1.],</span>
<span class="sd"> [3., 4.]], dtype=float32)</span>
<span class="sd"> If fewer indices are given, they are automatically supplemented by an</span>
<span class="sd"> appropriate number of ``slice(None)`` (&quot;``:``&quot;) to the right. For</span>
<span class="sd"> instance, a single integer indexes along the first axis:</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(0, 6).reshape((2, 3))</span>
<span class="sd"> &gt;&gt;&gt; x[0].asnumpy()</span>
<span class="sd"> array([0., 1., 2.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x[1:].asnumpy()</span>
<span class="sd"> array([[3., 4., 5.]], dtype=float32)</span>
<span class="sd"> To omit a range of axes that should be kept as-is, an `Ellipsis`</span>
<span class="sd"> (&quot;``...``&quot;) can be used:</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(0, 16).reshape((2, 2, 2, 2))</span>
<span class="sd"> &gt;&gt;&gt; x[0, ..., 1].asnumpy()</span>
<span class="sd"> array([[1., 3.],</span>
<span class="sd"> [5., 7.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x[0, :, :, 1].asnumpy() # equivalent</span>
<span class="sd"> array([[1., 3.],</span>
<span class="sd"> [5., 7.]], dtype=float32)</span>
<span class="sd"> New axes of length 1 can be created by inserting ``None``</span>
<span class="sd"> (`numpy.newaxis`) in the index:</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(0, 6).reshape((2, 3))</span>
<span class="sd"> &gt;&gt;&gt; x[None, :, :].asnumpy()</span>
<span class="sd"> array([[[0., 1., 2.],</span>
<span class="sd"> [3., 4., 5.]]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x[None, :, :].shape</span>
<span class="sd"> (1, 2, 3)</span>
<span class="sd"> If the indexed portion of the array is contiguous in memory, no data</span>
<span class="sd"> is copied. Instead, a shared-memory view of the original array is</span>
<span class="sd"> returned, and changes to that view affect the original array:</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(0, 8).reshape((2, 2, 2))</span>
<span class="sd"> &gt;&gt;&gt; y = x[0] # contiguous</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[0., 1.],</span>
<span class="sd"> [2., 3.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y[:] = -1</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[[-1., -1.],</span>
<span class="sd"> [-1., -1.]],</span>
<span class="sd"> &lt;BLANKLINE&gt;</span>
<span class="sd"> [[ 4., 5.],</span>
<span class="sd"> [ 6., 7.]]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(0, 8).reshape((2, 2, 2))</span>
<span class="sd"> &gt;&gt;&gt; y = x[1, :1, :] # contiguous</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[4., 5.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y[:] = -1</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[[ 0., 1.],</span>
<span class="sd"> [ 2., 3.]],</span>
<span class="sd"> &lt;BLANKLINE&gt;</span>
<span class="sd"> [[-1., -1.],</span>
<span class="sd"> [ 6., 7.]]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(0, 8).reshape((2, 2, 2))</span>
<span class="sd"> &gt;&gt;&gt; y = x[:, :, 1] # not contiguous</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[1., 3.],</span>
<span class="sd"> [5., 7.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y[:] = -1</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[[0., 1.],</span>
<span class="sd"> [2., 3.]],</span>
<span class="sd"> &lt;BLANKLINE&gt;</span>
<span class="sd"> [[4., 5.],</span>
<span class="sd"> [6., 7.]]], dtype=float32)</span>
<span class="sd"> If the indexing key contains `list`, `numpy.ndarray` or `NDArray`</span>
<span class="sd"> objects, advanced indexing is triggered, which always returns a</span>
<span class="sd"> copy:</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(0, 8).reshape((2, 2, 2))</span>
<span class="sd"> &gt;&gt;&gt; x[[0, 1]].asnumpy()</span>
<span class="sd"> array([[[0., 1.],</span>
<span class="sd"> [2., 3.]],</span>
<span class="sd"> &lt;BLANKLINE&gt;</span>
<span class="sd"> [[4., 5.],</span>
<span class="sd"> [6., 7.]]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x[[0, 1], :].asnumpy() # equivalent</span>
<span class="sd"> array([[[0., 1.],</span>
<span class="sd"> [2., 3.]],</span>
<span class="sd"> &lt;BLANKLINE&gt;</span>
<span class="sd"> [[4., 5.],</span>
<span class="sd"> [6., 7.]]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y = np.array([0, 1], dtype=&#39;int32&#39;)</span>
<span class="sd"> &gt;&gt;&gt; x[1:, y].asnumpy()</span>
<span class="sd"> array([[[4., 5.],</span>
<span class="sd"> [6., 7.]]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.array([0, 1], dtype=&#39;int32&#39;)</span>
<span class="sd"> &gt;&gt;&gt; x[1:, y].asnumpy()</span>
<span class="sd"> array([[[4., 5.],</span>
<span class="sd"> [6., 7.]]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">ndim</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">ndim</span>
<span class="n">shape</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span>
<span class="k">if</span> <span class="n">ndim</span> <span class="o">==</span> <span class="mi">0</span> <span class="ow">and</span> <span class="p">(</span><span class="n">key</span> <span class="o">==</span> <span class="p">()</span> <span class="ow">or</span> <span class="n">key</span> <span class="o">==</span> <span class="nb">slice</span><span class="p">(</span><span class="kc">None</span><span class="p">,</span> <span class="kc">None</span><span class="p">,</span> <span class="kc">None</span><span class="p">)):</span>
<span class="k">return</span> <span class="bp">self</span>
<span class="c1"># Handle simple cases for higher speed</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">)</span> <span class="ow">and</span> <span class="nb">len</span><span class="p">(</span><span class="n">key</span><span class="p">)</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">return</span> <span class="bp">self</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">)</span> <span class="ow">and</span> <span class="nb">len</span><span class="p">(</span><span class="n">key</span><span class="p">)</span> <span class="o">==</span> <span class="n">ndim</span>\
<span class="ow">and</span> <span class="nb">all</span><span class="p">(</span><span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">integer_types</span><span class="p">)</span> <span class="k">for</span> <span class="n">idx</span> <span class="ow">in</span> <span class="n">key</span><span class="p">):</span>
<span class="n">out</span> <span class="o">=</span> <span class="bp">self</span>
<span class="k">for</span> <span class="n">idx</span> <span class="ow">in</span> <span class="n">key</span><span class="p">:</span>
<span class="n">out</span> <span class="o">=</span> <span class="n">out</span><span class="p">[</span><span class="n">idx</span><span class="p">]</span>
<span class="k">return</span> <span class="n">out</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="n">integer_types</span><span class="p">):</span>
<span class="k">if</span> <span class="n">key</span> <span class="o">&gt;</span> <span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">-</span> <span class="mi">1</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">IndexError</span><span class="p">(</span>
<span class="s1">&#39;index </span><span class="si">{}</span><span class="s1"> is out of bounds for axis 0 with size </span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span>
<span class="n">key</span><span class="p">,</span> <span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]))</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_at</span><span class="p">(</span><span class="n">key</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="n">py_slice</span><span class="p">):</span>
<span class="k">if</span> <span class="p">(</span><span class="n">key</span><span class="o">.</span><span class="n">step</span> <span class="ow">is</span> <span class="kc">None</span> <span class="ow">or</span> <span class="n">key</span><span class="o">.</span><span class="n">step</span> <span class="o">==</span> <span class="mi">1</span><span class="p">):</span>
<span class="k">if</span> <span class="n">key</span><span class="o">.</span><span class="n">start</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span> <span class="ow">or</span> <span class="n">key</span><span class="o">.</span><span class="n">stop</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_slice</span><span class="p">(</span><span class="n">key</span><span class="o">.</span><span class="n">start</span><span class="p">,</span> <span class="n">key</span><span class="o">.</span><span class="n">stop</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="bp">self</span>
<span class="k">elif</span> <span class="n">key</span><span class="o">.</span><span class="n">step</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;slice step cannot be zero&quot;</span><span class="p">)</span>
<span class="n">key</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">indexing_key_expand_implicit_axes</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">key</span><span class="p">)</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;indexing key cannot be an empty tuple&#39;</span><span class="p">)</span>
<span class="n">indexing_dispatch_code</span> <span class="o">=</span> <span class="n">get_indexing_dispatch_code</span><span class="p">(</span><span class="n">key</span><span class="p">)</span>
<span class="k">if</span> <span class="n">indexing_dispatch_code</span> <span class="o">==</span> <span class="n">_NDARRAY_BASIC_INDEXING</span><span class="p">:</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_get_nd_basic_indexing</span><span class="p">(</span><span class="n">key</span><span class="p">)</span>
<span class="k">elif</span> <span class="n">indexing_dispatch_code</span> <span class="o">==</span> <span class="n">_NDARRAY_ADVANCED_INDEXING</span><span class="p">:</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_get_nd_advanced_indexing</span><span class="p">(</span><span class="n">key</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">RuntimeError</span>
<span class="k">def</span> <span class="nf">_prepare_value_nd</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">value</span><span class="p">,</span> <span class="n">bcast_shape</span><span class="p">,</span> <span class="n">squeeze_axes</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Return a broadcast `NDArray` with same context and dtype as ``self``.</span>
<span class="sd"> For setting item, The returned `ndarray` is squeezed according to squeeze_axes since the</span>
<span class="sd"> value_nd is assigned to not yet expanded space in original array.</span>
<span class="sd"> `value`: numeric types or array like.</span>
<span class="sd"> `bcast_shape`: a shape tuple.</span>
<span class="sd"> `squeeze_axes`: a sequence of axes to squeeze in the value array.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">):</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="n">full</span><span class="p">(</span><span class="n">bcast_shape</span><span class="p">,</span> <span class="n">value</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">ctx</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">type</span><span class="p">(</span><span class="n">value</span><span class="p">)</span> <span class="o">==</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__class__</span><span class="p">:</span> <span class="c1"># pylint: disable=unidiomatic-typecheck</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="n">value</span><span class="o">.</span><span class="n">as_in_context</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">ctx</span><span class="p">)</span>
<span class="k">if</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">dtype</span> <span class="o">!=</span> <span class="bp">self</span><span class="o">.</span><span class="n">dtype</span><span class="p">:</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">try</span><span class="p">:</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="n">array</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">ctx</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span><span class="p">)</span>
<span class="k">except</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="s1">&#39;</span><span class="si">{}</span><span class="s1"> does not support assignment with non-array-like &#39;</span>
<span class="s1">&#39;object </span><span class="si">{}</span><span class="s1"> of type </span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="vm">__class__</span><span class="p">,</span> <span class="n">value</span><span class="p">,</span> <span class="nb">type</span><span class="p">(</span><span class="n">value</span><span class="p">)))</span>
<span class="c1"># For setitem, if there is None in indices, we need to squeeze the assigned value_nd</span>
<span class="c1"># since None is also ignored in slicing the original array.</span>
<span class="k">if</span> <span class="n">squeeze_axes</span> <span class="ow">and</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">ndim</span> <span class="o">&gt;</span> <span class="nb">len</span><span class="p">(</span><span class="n">bcast_shape</span><span class="p">):</span>
<span class="n">squeeze_axes</span> <span class="o">=</span> <span class="nb">tuple</span><span class="p">([</span><span class="n">ax</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="n">squeeze_axes</span> <span class="k">if</span> <span class="n">ax</span> <span class="o">&lt;</span> <span class="nb">len</span><span class="p">(</span><span class="n">value_nd</span><span class="o">.</span><span class="n">shape</span><span class="p">)])</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">squeeze</span><span class="p">(</span><span class="n">axis</span><span class="o">=</span><span class="nb">tuple</span><span class="p">(</span><span class="n">squeeze_axes</span><span class="p">))</span>
<span class="c1"># handle the cases like the following</span>
<span class="c1"># a = nd.zeros((3, 3)), b = nd.ones((1, 1, 1, 1, 3)), a[0] = b</span>
<span class="c1"># b cannot broadcast directly to a[0].shape unless its leading 1-size axes are trimmed</span>
<span class="k">if</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">ndim</span> <span class="o">&gt;</span> <span class="nb">len</span><span class="p">(</span><span class="n">bcast_shape</span><span class="p">):</span>
<span class="n">squeeze_axes</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">value_nd</span><span class="o">.</span><span class="n">ndim</span> <span class="o">-</span> <span class="nb">len</span><span class="p">(</span><span class="n">bcast_shape</span><span class="p">)):</span>
<span class="k">if</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">i</span><span class="p">]</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="n">squeeze_axes</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">i</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">break</span>
<span class="k">if</span> <span class="n">squeeze_axes</span><span class="p">:</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">squeeze</span><span class="p">(</span><span class="n">squeeze_axes</span><span class="p">)</span>
<span class="k">if</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">shape</span> <span class="o">!=</span> <span class="n">bcast_shape</span><span class="p">:</span>
<span class="k">if</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">size</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">bcast_shape</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">broadcast_to</span><span class="p">(</span><span class="n">bcast_shape</span><span class="p">)</span>
<span class="k">return</span> <span class="n">value_nd</span>
<span class="c1"># pylint: disable=invalid-name</span>
<span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">_basic_indexing_key_to_begin_end_step</span><span class="p">(</span><span class="n">idcs</span><span class="p">,</span> <span class="n">shape</span><span class="p">,</span> <span class="n">keep_none</span><span class="o">=</span><span class="kc">True</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Map a tuple of ``slice`` and ``None`` (ignored) to begin, end, step tuples.&quot;&quot;&quot;</span>
<span class="n">idcs</span> <span class="o">=</span> <span class="p">[</span><span class="n">idx</span> <span class="k">for</span> <span class="n">idx</span> <span class="ow">in</span> <span class="n">idcs</span> <span class="k">if</span> <span class="n">idx</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">]</span>
<span class="n">idcs</span> <span class="o">=</span> <span class="p">[</span><span class="n">idx</span> <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">py_slice</span><span class="p">)</span> <span class="k">else</span> <span class="n">_int_to_slice</span><span class="p">(</span><span class="n">idx</span><span class="p">)</span>
<span class="k">for</span> <span class="n">idx</span> <span class="ow">in</span> <span class="n">idcs</span><span class="p">]</span>
<span class="k">if</span> <span class="n">keep_none</span><span class="p">:</span>
<span class="n">sss_list</span> <span class="o">=</span> <span class="p">[(</span><span class="n">slc</span><span class="o">.</span><span class="n">start</span><span class="p">,</span> <span class="n">slc</span><span class="o">.</span><span class="n">stop</span><span class="p">,</span> <span class="n">slc</span><span class="o">.</span><span class="n">step</span><span class="p">)</span> <span class="k">for</span> <span class="n">slc</span><span class="p">,</span> <span class="n">n</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">idcs</span><span class="p">,</span> <span class="n">shape</span><span class="p">)]</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">sss_list</span> <span class="o">=</span> <span class="p">[</span><span class="n">slc</span><span class="o">.</span><span class="n">indices</span><span class="p">(</span><span class="n">n</span><span class="p">)</span> <span class="k">for</span> <span class="n">slc</span><span class="p">,</span> <span class="n">n</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">idcs</span><span class="p">,</span> <span class="n">shape</span><span class="p">)]</span>
<span class="k">return</span> <span class="nb">tuple</span><span class="p">(</span><span class="nb">zip</span><span class="p">(</span><span class="o">*</span><span class="n">sss_list</span><span class="p">))</span>
<span class="c1"># pylint: enable=invalid-name</span>
<span class="c1"># pylint: disable=invalid-name</span>
<span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">_basic_indexing_key_int_to_slice</span><span class="p">(</span><span class="n">idcs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Return the converted indexing tuple and the integer axes.&quot;&quot;&quot;</span>
<span class="n">int_axes</span> <span class="o">=</span> <span class="p">[]</span>
<span class="n">conv_idcs</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">ax</span><span class="p">,</span> <span class="n">idx</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">idcs</span><span class="p">):</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">integer_types</span><span class="p">):</span>
<span class="n">conv_idcs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">_int_to_slice</span><span class="p">(</span><span class="n">idx</span><span class="p">))</span>
<span class="n">int_axes</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">ax</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">conv_idcs</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">idx</span><span class="p">)</span>
<span class="k">return</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">conv_idcs</span><span class="p">),</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">int_axes</span><span class="p">)</span>
<span class="c1"># pylint: enable=invalid-name</span>
<span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">_new_axes_after_basic_indexing</span><span class="p">(</span><span class="n">axes</span><span class="p">,</span> <span class="n">key</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Return indices of ``axes`` after slicing with ``key``.</span>
<span class="sd"> This function is used to calculate the positions where new axes should</span>
<span class="sd"> end up after indexing, taking into account the removal of axes by</span>
<span class="sd"> integer indexing.</span>
<span class="sd"> The ``key`` sequence should be the exapanded key including slices, integer types</span>
<span class="sd"> and ``None``.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">steps</span> <span class="o">=</span> <span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">+</span> <span class="p">[</span><span class="mi">0</span> <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">integer_types</span><span class="p">)</span> <span class="k">else</span> <span class="mi">1</span> <span class="k">for</span> <span class="n">idx</span> <span class="ow">in</span> <span class="n">key</span><span class="p">]</span>
<span class="n">cum_steps</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">cumsum</span><span class="p">(</span><span class="n">steps</span><span class="p">)</span>
<span class="n">axes_after</span> <span class="o">=</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">cum_steps</span><span class="p">[</span><span class="n">axes</span><span class="p">])</span>
<span class="k">return</span> <span class="n">axes_after</span>
<span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">_new_axes_after_advanced_indexing</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="n">adv_axs</span><span class="p">,</span> <span class="n">bcast_adv_ndim</span><span class="p">,</span> <span class="n">adv_are_adjacent</span><span class="p">):</span> <span class="c1"># pylint: disable=invalid-name</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Return indices of ``axes`` after slicing with ``key_nd``.</span>
<span class="sd"> This function is used to calculate the positions where new axes should</span>
<span class="sd"> end up after indexing, taking into account the removal of axes by</span>
<span class="sd"> integer indexing.</span>
<span class="sd"> The ``key`` sequence should be the exapanded key including slices, array like objects,</span>
<span class="sd"> integer types and ``None``.</span>
<span class="sd"> ``adv_axes`` is the sequence of indices of advanced axes.</span>
<span class="sd"> ``bcast_adv_ndim`` is the number of dimensions of advanced indexing subspace.</span>
<span class="sd"> ``adv_are_adjacent`` is a boolean value. Value being True means all advanced indicies are adjacent.</span>
<span class="sd"> Note: integer indices are also considered advanced indices here.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">new_axes</span> <span class="o">=</span> <span class="p">[</span><span class="n">ax</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">key</span><span class="p">))</span> <span class="k">if</span> <span class="n">key</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">]</span>
<span class="n">adv_axs_set</span> <span class="o">=</span> <span class="nb">set</span><span class="p">(</span><span class="n">adv_axs</span><span class="p">)</span>
<span class="k">if</span> <span class="ow">not</span> <span class="n">adv_are_adjacent</span><span class="p">:</span>
<span class="n">steps</span> <span class="o">=</span> <span class="p">[</span><span class="n">bcast_adv_ndim</span><span class="p">]</span> <span class="o">+</span> <span class="p">[</span><span class="mi">0</span> <span class="k">if</span> <span class="n">ax</span> <span class="ow">in</span> <span class="n">adv_axs_set</span> <span class="k">else</span> <span class="mi">1</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">key</span><span class="p">))]</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">steps</span> <span class="o">=</span> <span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">+</span> <span class="p">[</span><span class="mi">0</span> <span class="k">if</span> <span class="n">ax</span> <span class="ow">in</span> <span class="n">adv_axs_set</span> <span class="k">else</span> <span class="mi">1</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">key</span><span class="p">))]</span>
<span class="n">cum_steps</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">cumsum</span><span class="p">(</span><span class="n">steps</span><span class="p">)</span>
<span class="n">axes_after</span> <span class="o">=</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">cum_steps</span><span class="p">[</span><span class="n">new_axes</span><span class="p">])</span>
<span class="k">return</span> <span class="n">axes_after</span>
<span class="c1"># pylint: disable=invalid-name</span>
<span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">_basic_indexing_slice_is_contiguous</span><span class="p">(</span><span class="n">slc_key</span><span class="p">,</span> <span class="n">shape</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Whether indexing with the given key results in a contiguous array.</span>
<span class="sd"> The rule is: From right to left, if in an axis, a slice produces a</span>
<span class="sd"> proper subset, the later slice must have &lt;=1 elements.</span>
<span class="sd"> The ``slc_key`` sequence must have the same length as ``shape`` and</span>
<span class="sd"> only contain `slice` objects.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="n">slc_key</span><span class="p">)</span> <span class="o">==</span> <span class="nb">len</span><span class="p">(</span><span class="n">shape</span><span class="p">)</span>
<span class="n">is_subset</span> <span class="o">=</span> <span class="kc">False</span>
<span class="n">total_sliced_elements</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">prod</span><span class="p">([</span><span class="n">_get_slice_len</span><span class="p">(</span><span class="n">slc</span><span class="p">,</span> <span class="n">n</span><span class="p">)</span>
<span class="k">for</span> <span class="n">slc</span><span class="p">,</span> <span class="n">n</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">slc_key</span><span class="p">,</span> <span class="n">shape</span><span class="p">)])</span>
<span class="k">if</span> <span class="n">total_sliced_elements</span> <span class="ow">in</span> <span class="p">(</span><span class="mi">0</span><span class="p">,</span> <span class="mi">1</span><span class="p">):</span>
<span class="k">return</span> <span class="kc">True</span>
<span class="k">for</span> <span class="n">idx</span><span class="p">,</span> <span class="n">n</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="nb">reversed</span><span class="p">(</span><span class="n">slc_key</span><span class="p">),</span> <span class="nb">reversed</span><span class="p">(</span><span class="n">shape</span><span class="p">)):</span>
<span class="n">_</span><span class="p">,</span> <span class="n">_</span><span class="p">,</span> <span class="n">step</span> <span class="o">=</span> <span class="n">idx</span><span class="o">.</span><span class="n">indices</span><span class="p">(</span><span class="n">n</span><span class="p">)</span>
<span class="n">num_elements</span> <span class="o">=</span> <span class="n">_get_slice_len</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">n</span><span class="p">)</span>
<span class="k">if</span> <span class="n">num_elements</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">return</span> <span class="kc">True</span>
<span class="k">elif</span> <span class="n">num_elements</span> <span class="o">&gt;</span> <span class="mi">1</span> <span class="ow">and</span> <span class="p">(</span><span class="n">step</span> <span class="o">&gt;</span> <span class="mi">1</span> <span class="ow">or</span> <span class="n">step</span> <span class="o">&lt;</span> <span class="mi">0</span><span class="p">):</span>
<span class="c1"># We do not support the case of reverse slicing of multiple elements and</span>
<span class="c1"># forward slicing of #elements &gt; 1 and step &gt; 1</span>
<span class="k">return</span> <span class="kc">False</span>
<span class="k">elif</span> <span class="n">is_subset</span><span class="p">:</span>
<span class="k">if</span> <span class="n">num_elements</span> <span class="o">&gt;</span> <span class="mi">1</span><span class="p">:</span>
<span class="k">return</span> <span class="kc">False</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">if</span> <span class="n">num_elements</span> <span class="o">&lt;</span> <span class="n">n</span><span class="p">:</span>
<span class="n">is_subset</span> <span class="o">=</span> <span class="kc">True</span>
<span class="k">return</span> <span class="kc">True</span>
<span class="c1"># pylint: enable=invalid-name</span>
<span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">_basic_indexing_sliced_shape</span><span class="p">(</span><span class="n">slc_key</span><span class="p">,</span> <span class="n">shape</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Return the shape after slicing with the given key.&quot;&quot;&quot;</span>
<span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="n">slc_key</span><span class="p">)</span> <span class="o">==</span> <span class="nb">len</span><span class="p">(</span><span class="n">shape</span><span class="p">)</span>
<span class="n">sliced_shape</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">slc</span><span class="p">,</span> <span class="n">n</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">slc_key</span><span class="p">,</span> <span class="n">shape</span><span class="p">):</span>
<span class="n">num_elements</span> <span class="o">=</span> <span class="n">_get_slice_len</span><span class="p">(</span><span class="n">slc</span><span class="p">,</span> <span class="n">n</span><span class="p">)</span>
<span class="n">sliced_shape</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">num_elements</span><span class="p">)</span>
<span class="k">return</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">sliced_shape</span><span class="p">)</span>
<span class="c1"># pylint: disable=invalid-name</span>
<span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">_basic_indexing_contiguous_flat_begin_end</span><span class="p">(</span><span class="n">slc_key</span><span class="p">,</span> <span class="n">shape</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Return the flat indices of begin and end for contiguous slicing.&quot;&quot;&quot;</span>
<span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="n">slc_key</span><span class="p">)</span> <span class="o">==</span> <span class="nb">len</span><span class="p">(</span><span class="n">shape</span><span class="p">)</span>
<span class="n">flat_begin</span><span class="p">,</span> <span class="n">flat_end</span> <span class="o">=</span> <span class="mi">0</span><span class="p">,</span> <span class="mi">0</span>
<span class="k">for</span> <span class="n">slc</span><span class="p">,</span> <span class="n">n</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">slc_key</span><span class="p">,</span> <span class="n">shape</span><span class="p">):</span>
<span class="n">flat_begin</span> <span class="o">*=</span> <span class="n">n</span>
<span class="n">flat_end</span> <span class="o">*=</span> <span class="n">n</span>
<span class="n">begin</span><span class="p">,</span> <span class="n">_</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">slc</span><span class="o">.</span><span class="n">indices</span><span class="p">(</span><span class="n">n</span><span class="p">)</span>
<span class="n">num_elements</span> <span class="o">=</span> <span class="n">_get_slice_len</span><span class="p">(</span><span class="n">slc</span><span class="p">,</span> <span class="n">n</span><span class="p">)</span>
<span class="k">if</span> <span class="n">num_elements</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">return</span> <span class="mi">0</span><span class="p">,</span> <span class="mi">0</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">flat_begin</span> <span class="o">+=</span> <span class="n">begin</span>
<span class="n">flat_end</span> <span class="o">+=</span> <span class="n">begin</span> <span class="o">+</span> <span class="n">num_elements</span> <span class="o">-</span> <span class="mi">1</span>
<span class="k">return</span> <span class="n">flat_begin</span><span class="p">,</span> <span class="n">flat_end</span> <span class="o">+</span> <span class="mi">1</span>
<span class="c1"># pylint: enable=invalid-name</span>
<span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">_drop_int_axes</span><span class="p">(</span><span class="n">indexed_shape</span><span class="p">,</span> <span class="n">int_axes</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;drop the axis of indexed_shape corresponding to int axes&quot;&quot;&quot;</span>
<span class="n">bcast_shape</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">i</span><span class="p">,</span> <span class="n">size</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">indexed_shape</span><span class="p">):</span>
<span class="k">if</span> <span class="n">i</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">int_axes</span><span class="p">:</span>
<span class="n">bcast_shape</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">size</span><span class="p">)</span>
<span class="k">if</span> <span class="ow">not</span> <span class="n">bcast_shape</span><span class="p">:</span>
<span class="n">bcast_shape</span> <span class="o">=</span> <span class="p">[</span><span class="mi">1</span><span class="p">]</span>
<span class="k">return</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">bcast_shape</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_set_nd_basic_indexing</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">key</span><span class="p">,</span> <span class="n">value</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;This function indexes ``self`` with a tuple of ``slice`` objects only.&quot;&quot;&quot;</span>
<span class="k">for</span> <span class="n">idx</span> <span class="ow">in</span> <span class="n">key</span><span class="p">:</span>
<span class="k">if</span> <span class="n">idx</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span> <span class="ow">and</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="p">(</span><span class="n">py_slice</span><span class="p">,</span> <span class="n">integer_types</span><span class="p">)):</span>
<span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span>
<span class="s1">&#39;`key` may only contain `slice` or integer objects in the &#39;</span>
<span class="s1">&#39;basic implementation, got object of type </span><span class="si">{}</span><span class="s1">. &#39;</span>
<span class="s1">&#39;This is a bug, please report it!&#39;</span>
<span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">idx</span><span class="p">)))</span>
<span class="n">key_nd</span> <span class="o">=</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">idx</span> <span class="k">for</span> <span class="n">idx</span> <span class="ow">in</span> <span class="n">key</span> <span class="k">if</span> <span class="n">idx</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">)</span>
<span class="n">int_axes</span> <span class="o">=</span> <span class="p">[</span>
<span class="n">ax</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">key_nd</span><span class="p">))</span> <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">key_nd</span><span class="p">[</span><span class="n">ax</span><span class="p">],</span> <span class="n">integer_types</span><span class="p">)</span>
<span class="p">]</span>
<span class="c1"># Check bounds for integer axes</span>
<span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="n">int_axes</span><span class="p">:</span> <span class="c1"># pylint: disable=invalid-name</span>
<span class="k">if</span> <span class="ow">not</span> <span class="o">-</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="o">&lt;=</span> <span class="n">key_nd</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="o">&lt;</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">ax</span><span class="p">]:</span>
<span class="k">raise</span> <span class="ne">IndexError</span><span class="p">(</span>
<span class="s1">&#39;index </span><span class="si">{}</span><span class="s1"> is out of bounds for axis </span><span class="si">{}</span><span class="s1"> with size </span><span class="si">{}</span><span class="s1">&#39;</span>
<span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">key_nd</span><span class="p">[</span><span class="n">ax</span><span class="p">],</span> <span class="n">ax</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">ax</span><span class="p">]))</span>
<span class="n">begin</span><span class="p">,</span> <span class="n">end</span><span class="p">,</span> <span class="n">step</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_basic_indexing_key_to_begin_end_step</span><span class="p">(</span>
<span class="n">key</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">keep_none</span><span class="o">=</span><span class="kc">False</span>
<span class="p">)</span>
<span class="n">indexed_shape</span> <span class="o">=</span> <span class="nb">tuple</span><span class="p">(</span>
<span class="n">_get_dim_size</span><span class="p">(</span><span class="n">b</span><span class="p">,</span> <span class="n">e</span><span class="p">,</span> <span class="n">s</span><span class="p">)</span> <span class="k">for</span> <span class="n">b</span><span class="p">,</span> <span class="n">e</span><span class="p">,</span> <span class="n">s</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">begin</span><span class="p">,</span> <span class="n">end</span><span class="p">,</span> <span class="n">step</span><span class="p">)</span>
<span class="p">)</span>
<span class="n">can_assign_directly</span> <span class="o">=</span> <span class="p">(</span>
<span class="p">(</span><span class="n">indexed_shape</span> <span class="o">==</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span> <span class="ow">and</span> <span class="nb">all</span><span class="p">(</span><span class="n">s</span> <span class="o">&gt;</span> <span class="mi">0</span> <span class="k">for</span> <span class="n">s</span> <span class="ow">in</span> <span class="n">step</span><span class="p">)</span>
<span class="p">)</span>
<span class="n">begin</span><span class="p">,</span> <span class="n">end</span><span class="p">,</span> <span class="n">step</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_basic_indexing_key_to_begin_end_step</span><span class="p">(</span>
<span class="n">key</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">keep_none</span><span class="o">=</span><span class="kc">True</span>
<span class="p">)</span>
<span class="n">none_axes</span> <span class="o">=</span> <span class="p">[</span><span class="n">ax</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">key</span><span class="p">))</span> <span class="k">if</span> <span class="n">key</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">]</span>
<span class="n">new_axes</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_new_axes_after_basic_indexing</span><span class="p">(</span><span class="n">none_axes</span><span class="p">,</span> <span class="n">key</span><span class="p">)</span>
<span class="k">if</span> <span class="n">can_assign_directly</span><span class="p">:</span>
<span class="c1"># Easy case, overwrite whole array.</span>
<span class="k">if</span> <span class="nb">type</span><span class="p">(</span><span class="n">value</span><span class="p">)</span> <span class="o">==</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__class__</span><span class="p">:</span> <span class="c1"># pylint: disable=unidiomatic-typecheck</span>
<span class="k">if</span> <span class="n">value</span><span class="o">.</span><span class="n">handle</span> <span class="ow">is</span> <span class="ow">not</span> <span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">:</span>
<span class="c1"># Need to do this before `broadcast_to`.</span>
<span class="n">bcast_shape</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_drop_int_axes</span><span class="p">(</span><span class="n">indexed_shape</span><span class="p">,</span> <span class="n">int_axes</span><span class="p">)</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_prepare_value_nd</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">bcast_shape</span><span class="o">=</span><span class="n">bcast_shape</span><span class="p">,</span> <span class="n">squeeze_axes</span><span class="o">=</span><span class="n">new_axes</span><span class="p">)</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">indexed_shape</span><span class="p">)</span>
<span class="n">value_nd</span><span class="o">.</span><span class="n">copyto</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">):</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="nb">bool</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_full</span><span class="p">(</span><span class="nb">int</span><span class="p">(</span><span class="n">value</span><span class="p">))</span>
<span class="k">else</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_full</span><span class="p">(</span><span class="n">value</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">ndarray</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">generic</span><span class="p">)):</span>
<span class="n">tmp_shape</span> <span class="o">=</span> <span class="n">_shape_for_bcast</span><span class="p">(</span>
<span class="n">value</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">target_ndim</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">ndim</span><span class="p">,</span> <span class="n">new_axes</span><span class="o">=</span><span class="n">int_axes</span>
<span class="p">)</span>
<span class="n">value</span> <span class="o">=</span> <span class="n">value</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">tmp_shape</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">generic</span><span class="p">)</span> <span class="ow">or</span> <span class="n">value</span><span class="o">.</span><span class="n">shape</span> <span class="o">!=</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">:</span>
<span class="n">value</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">broadcast_to</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_sync_copyfrom</span><span class="p">(</span><span class="n">value</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="c1"># Other array-like</span>
<span class="c1"># drop the axis of indexed_shape corresponding to int axes</span>
<span class="n">bcast_shape</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_drop_int_axes</span><span class="p">(</span><span class="n">indexed_shape</span><span class="p">,</span> <span class="n">int_axes</span><span class="p">)</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_prepare_value_nd</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">bcast_shape</span><span class="o">=</span><span class="n">bcast_shape</span><span class="p">,</span> <span class="n">squeeze_axes</span><span class="o">=</span><span class="n">new_axes</span><span class="p">)</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">indexed_shape</span><span class="p">)</span>
<span class="n">value_nd</span><span class="o">.</span><span class="n">copyto</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">slice_assign_scalar</span><span class="p">(</span><span class="nb">float</span><span class="p">(</span><span class="n">value</span><span class="p">),</span> <span class="n">begin</span><span class="p">,</span> <span class="n">end</span><span class="p">,</span> <span class="n">step</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="c1"># drop the axis of indexed_shape corresponding to int axes</span>
<span class="n">bcast_shape</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_drop_int_axes</span><span class="p">(</span><span class="n">indexed_shape</span><span class="p">,</span> <span class="n">int_axes</span><span class="p">)</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_prepare_value_nd</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">bcast_shape</span><span class="o">=</span><span class="n">bcast_shape</span><span class="p">,</span> <span class="n">squeeze_axes</span><span class="o">=</span><span class="n">new_axes</span><span class="p">)</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="n">value_nd</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">indexed_shape</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">slice_assign</span><span class="p">(</span><span class="n">value_nd</span><span class="p">,</span> <span class="n">begin</span><span class="p">,</span> <span class="n">end</span><span class="p">,</span> <span class="n">step</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_get_nd_basic_indexing</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">key</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;This function indexes ``self`` with a tuple of `slice` objects only.&quot;&quot;&quot;</span>
<span class="n">key_nd</span> <span class="o">=</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">idx</span> <span class="k">for</span> <span class="n">idx</span> <span class="ow">in</span> <span class="n">key</span> <span class="k">if</span> <span class="n">idx</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">key_nd</span><span class="p">)</span> <span class="o">&lt;</span> <span class="bp">self</span><span class="o">.</span><span class="n">ndim</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span>
<span class="s1">&#39;too few indices after normalization: expected `ndim` (</span><span class="si">{}</span><span class="s1">) &#39;</span>
<span class="s1">&#39;but got </span><span class="si">{}</span><span class="s1">. This is a bug, please report it!&#39;</span>
<span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">ndim</span><span class="p">,</span> <span class="nb">len</span><span class="p">(</span><span class="n">key_nd</span><span class="p">))</span>
<span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">key_nd</span><span class="p">)</span> <span class="o">&gt;</span> <span class="bp">self</span><span class="o">.</span><span class="n">ndim</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">IndexError</span><span class="p">(</span>
<span class="s1">&#39;too many indices (</span><span class="si">{}</span><span class="s1">) for array with </span><span class="si">{}</span><span class="s1"> dimensions&#39;</span>
<span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">key_nd</span><span class="p">),</span> <span class="bp">self</span><span class="o">.</span><span class="n">ndim</span><span class="p">)</span>
<span class="p">)</span>
<span class="n">slc_key</span><span class="p">,</span> <span class="n">int_axes</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_basic_indexing_key_int_to_slice</span><span class="p">(</span><span class="n">key_nd</span><span class="p">)</span>
<span class="n">none_axes</span> <span class="o">=</span> <span class="p">[</span><span class="n">ax</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">key</span><span class="p">))</span> <span class="k">if</span> <span class="n">key</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">]</span>
<span class="k">if</span> <span class="n">none_axes</span><span class="p">:</span>
<span class="n">new_axes</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_new_axes_after_basic_indexing</span><span class="p">(</span><span class="n">none_axes</span><span class="p">,</span> <span class="n">key</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">new_axes</span> <span class="o">=</span> <span class="p">[]</span>
<span class="c1"># Check bounds for integer axes</span>
<span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="n">int_axes</span><span class="p">:</span> <span class="c1"># pylint: disable=invalid-name</span>
<span class="k">if</span> <span class="ow">not</span> <span class="o">-</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="o">&lt;=</span> <span class="n">key_nd</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="o">&lt;</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">ax</span><span class="p">]:</span>
<span class="k">raise</span> <span class="ne">IndexError</span><span class="p">(</span>
<span class="s1">&#39;index </span><span class="si">{}</span><span class="s1"> is out of bounds for axis </span><span class="si">{}</span><span class="s1"> with size </span><span class="si">{}</span><span class="s1">&#39;</span>
<span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">key_nd</span><span class="p">[</span><span class="n">ax</span><span class="p">],</span> <span class="n">ax</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">ax</span><span class="p">]))</span>
<span class="c1"># Convert to begin, end and step, and return immediately if the slice</span>
<span class="c1"># is empty</span>
<span class="n">begin</span><span class="p">,</span> <span class="n">end</span><span class="p">,</span> <span class="n">step</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_basic_indexing_key_to_begin_end_step</span><span class="p">(</span>
<span class="n">slc_key</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">keep_none</span><span class="o">=</span><span class="kc">False</span>
<span class="p">)</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_basic_indexing_slice_is_contiguous</span><span class="p">(</span><span class="n">slc_key</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">):</span>
<span class="c1"># Create a shared-memory view by using low-level flat slicing</span>
<span class="n">flat_begin</span><span class="p">,</span> <span class="n">flat_end</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_basic_indexing_contiguous_flat_begin_end</span><span class="p">(</span>
<span class="n">slc_key</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span>
<span class="p">)</span>
<span class="n">handle</span> <span class="o">=</span> <span class="n">NDArrayHandle</span><span class="p">()</span>
<span class="n">flat_self</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="o">-</span><span class="mi">1</span><span class="p">)</span>
<span class="k">if</span> <span class="n">_int64_enabled</span><span class="p">():</span>
<span class="n">check_call</span><span class="p">(</span>
<span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArraySlice64</span><span class="p">(</span>
<span class="n">flat_self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int64</span><span class="p">(</span><span class="n">flat_begin</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int64</span><span class="p">(</span><span class="n">flat_end</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">handle</span><span class="p">),</span>
<span class="p">)</span>
<span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">check_call</span><span class="p">(</span>
<span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArraySlice</span><span class="p">(</span>
<span class="n">flat_self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_uint32</span><span class="p">(</span><span class="n">flat_begin</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_uint32</span><span class="p">(</span><span class="n">flat_end</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">handle</span><span class="p">),</span>
<span class="p">)</span>
<span class="p">)</span>
<span class="n">sliced_shape</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_basic_indexing_sliced_shape</span><span class="p">(</span><span class="n">slc_key</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span>
<span class="n">sliced</span> <span class="o">=</span> <span class="n">NDArray</span><span class="p">(</span><span class="n">handle</span><span class="o">=</span><span class="n">handle</span><span class="p">,</span> <span class="n">writable</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">writable</span><span class="p">)</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">sliced_shape</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">begin</span><span class="p">,</span> <span class="n">end</span><span class="p">,</span> <span class="n">step</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_basic_indexing_key_to_begin_end_step</span><span class="p">(</span>
<span class="n">slc_key</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">keep_none</span><span class="o">=</span><span class="kc">True</span>
<span class="p">)</span>
<span class="n">sliced</span> <span class="o">=</span> <span class="n">op</span><span class="o">.</span><span class="n">slice</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">begin</span><span class="p">,</span> <span class="n">end</span><span class="p">,</span> <span class="n">step</span><span class="p">)</span>
<span class="c1"># Reshape to final shape due to integer and `None` entries in `key`.</span>
<span class="n">final_shape</span> <span class="o">=</span> <span class="p">[</span><span class="n">sliced</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">i</span><span class="p">]</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">sliced</span><span class="o">.</span><span class="n">ndim</span><span class="p">)</span>
<span class="k">if</span> <span class="n">i</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">int_axes</span><span class="p">]</span>
<span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="n">new_axes</span><span class="p">:</span> <span class="c1"># pylint: disable=invalid-name</span>
<span class="n">final_shape</span><span class="o">.</span><span class="n">insert</span><span class="p">(</span><span class="n">ax</span><span class="p">,</span> <span class="mi">1</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">final_shape</span><span class="p">)</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="c1"># Override for single element indexing</span>
<span class="n">final_shape</span> <span class="o">=</span> <span class="p">[</span><span class="mi">1</span><span class="p">]</span>
<span class="k">return</span> <span class="n">sliced</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">final_shape</span><span class="p">)</span>
<span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">_advanced_index_to_array</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">ax_len</span><span class="p">,</span> <span class="n">ctx</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convert ``idx`` to `NDArray` for advanced indexing.</span>
<span class="sd"> The ``ax_len`` is used to convert `slice` objects to integer arrays.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="n">_int64_enabled</span><span class="p">():</span>
<span class="n">idx_dtype</span> <span class="o">=</span> <span class="s1">&#39;int64&#39;</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">idx_dtype</span> <span class="o">=</span> <span class="s1">&#39;int32&#39;</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">NDArray</span><span class="p">):</span>
<span class="k">if</span> <span class="n">idx</span><span class="o">.</span><span class="n">dtype</span> <span class="o">!=</span> <span class="n">idx_dtype</span><span class="p">:</span>
<span class="n">idx</span> <span class="o">=</span> <span class="n">idx</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="n">idx_dtype</span><span class="p">)</span>
<span class="k">return</span> <span class="n">idx</span><span class="o">.</span><span class="n">as_in_context</span><span class="p">(</span><span class="n">ctx</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">ndarray</span><span class="p">,</span> <span class="nb">list</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">)):</span>
<span class="k">return</span> <span class="n">array</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">ctx</span><span class="p">,</span> <span class="n">idx_dtype</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">integer_types</span><span class="p">):</span>
<span class="k">return</span> <span class="n">array</span><span class="p">([</span><span class="n">idx</span><span class="p">],</span> <span class="n">ctx</span><span class="p">,</span> <span class="n">idx_dtype</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">py_slice</span><span class="p">):</span>
<span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="p">,</span> <span class="n">step</span> <span class="o">=</span> <span class="n">idx</span><span class="o">.</span><span class="n">indices</span><span class="p">(</span><span class="n">ax_len</span><span class="p">)</span>
<span class="k">return</span> <span class="n">arange</span><span class="p">(</span><span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="p">,</span> <span class="n">step</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="n">ctx</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">idx_dtype</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="nb">range</span><span class="p">):</span>
<span class="k">return</span> <span class="n">arange</span><span class="p">(</span><span class="n">idx</span><span class="o">.</span><span class="n">start</span><span class="p">,</span> <span class="n">idx</span><span class="o">.</span><span class="n">stop</span><span class="p">,</span> <span class="n">idx</span><span class="o">.</span><span class="n">step</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="n">ctx</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">idx_dtype</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s1">&#39;illegal index type </span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">idx</span><span class="p">)))</span>
<span class="c1"># pylint: disable=invalid-name</span>
<span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">_broadcast_advanced_indices</span><span class="p">(</span><span class="n">arrays</span><span class="p">,</span> <span class="n">block_axes</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Broadcast arrays according to position in the sequence.</span>
<span class="sd"> Here, &quot;according to position&quot; means that an array of dimension 1</span>
<span class="sd"> (which is the case for all except ``block_axes``) will have shape</span>
<span class="sd"> ``(1, ..., 1, N, 1, ..., 1)``, where ``N`` is the length, and the</span>
<span class="sd"> position of ``N`` in the shape is the same as the position of the</span>
<span class="sd"> array in the ``arrays`` sequence, plus extra dimensions of the</span>
<span class="sd"> advanced block if it is left of the array.</span>
<span class="sd"> The arrays at ``block_axes`` are the advanced indices. They are assumed to</span>
<span class="sd"> be ready for mutual broadcasting to produce the advanced indexing block.</span>
<span class="sd"> It is further assumed that the numbers in ``block_axes`` are consecutive.</span>
<span class="sd"> The return value is a tuple containing the arrays with broadcast shapes.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">block_shape</span> <span class="o">=</span> <span class="n">_broadcast_shapes</span><span class="p">([</span><span class="n">arrays</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="n">block_axes</span><span class="p">])</span>
<span class="n">ndim_blk</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">block_shape</span><span class="p">)</span>
<span class="n">ndim_blk_delta</span> <span class="o">=</span> <span class="n">ndim_blk</span> <span class="o">-</span> <span class="nb">len</span><span class="p">(</span><span class="n">block_axes</span><span class="p">)</span>
<span class="n">ndim_lead</span> <span class="o">=</span> <span class="n">block_axes</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
<span class="n">ndim_trail</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">arrays</span><span class="p">)</span> <span class="o">-</span> <span class="p">(</span><span class="n">block_axes</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">]</span> <span class="o">+</span> <span class="mi">1</span><span class="p">)</span>
<span class="n">bcast_shape</span> <span class="o">=</span> <span class="p">(</span>
<span class="nb">tuple</span><span class="p">(</span><span class="n">arrays</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">ndim_lead</span><span class="p">))</span> <span class="o">+</span>
<span class="n">block_shape</span> <span class="o">+</span>
<span class="nb">tuple</span><span class="p">(</span><span class="n">arrays</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">block_axes</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">]</span> <span class="o">+</span> <span class="mi">1</span><span class="p">,</span> <span class="nb">len</span><span class="p">(</span><span class="n">arrays</span><span class="p">)))</span>
<span class="p">)</span>
<span class="n">bcast_arrays</span> <span class="o">=</span> <span class="p">[</span><span class="kc">None</span><span class="p">]</span> <span class="o">*</span> <span class="nb">len</span><span class="p">(</span><span class="n">arrays</span><span class="p">)</span>
<span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="n">block_axes</span><span class="p">:</span>
<span class="n">arr</span> <span class="o">=</span> <span class="n">arrays</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span><span class="o">.</span><span class="n">broadcast_to</span><span class="p">(</span><span class="n">block_shape</span><span class="p">)</span>
<span class="n">shp</span> <span class="o">=</span> <span class="p">(</span><span class="mi">1</span><span class="p">,)</span> <span class="o">*</span> <span class="n">ndim_lead</span> <span class="o">+</span> <span class="n">block_shape</span> <span class="o">+</span> <span class="p">(</span><span class="mi">1</span><span class="p">,)</span> <span class="o">*</span> <span class="n">ndim_trail</span>
<span class="n">bcast_arrays</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="o">=</span> <span class="n">arr</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">shp</span><span class="p">)</span><span class="o">.</span><span class="n">broadcast_to</span><span class="p">(</span><span class="n">bcast_shape</span><span class="p">)</span>
<span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="nb">set</span><span class="p">(</span><span class="nb">range</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">arrays</span><span class="p">)))</span> <span class="o">-</span> <span class="nb">set</span><span class="p">(</span><span class="n">block_axes</span><span class="p">):</span>
<span class="n">shp</span> <span class="o">=</span> <span class="p">[</span><span class="mi">1</span><span class="p">]</span> <span class="o">*</span> <span class="nb">len</span><span class="p">(</span><span class="n">bcast_shape</span><span class="p">)</span>
<span class="k">if</span> <span class="n">ax</span> <span class="o">&lt;</span> <span class="n">ndim_lead</span><span class="p">:</span>
<span class="n">shp</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="o">=</span> <span class="n">arrays</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">shp</span><span class="p">[</span><span class="n">ax</span> <span class="o">+</span> <span class="n">ndim_blk_delta</span><span class="p">]</span> <span class="o">=</span> <span class="n">arrays</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
<span class="n">bcast_arrays</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="o">=</span> <span class="n">arrays</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">shp</span><span class="p">)</span><span class="o">.</span><span class="n">broadcast_to</span><span class="p">(</span><span class="n">bcast_shape</span><span class="p">)</span>
<span class="k">return</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">bcast_arrays</span><span class="p">)</span>
<span class="c1"># pylint: enable=invalid-name</span>
<span class="nd">@staticmethod</span>
<span class="k">def</span> <span class="nf">_drop_slice_none_at_end</span><span class="p">(</span><span class="n">key</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Remove ``slice(None)`` at the end of a key.</span>
<span class="sd"> This is used for efficiency in advanced indexing, to avoid generating</span>
<span class="sd"> ``arange(n)`` arrays for these axes. The `gather_nd` and `scatter_nd`</span>
<span class="sd"> handle implicit full trailing axes automatically.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">key</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">key</span><span class="p">)</span>
<span class="k">while</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">key</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">],</span> <span class="n">py_slice</span><span class="p">)</span> <span class="ow">and</span> <span class="n">key</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">]</span> <span class="o">==</span> <span class="nb">slice</span><span class="p">(</span><span class="kc">None</span><span class="p">):</span>
<span class="n">key</span><span class="o">.</span><span class="n">pop</span><span class="p">()</span>
<span class="k">return</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">key</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_get_index_nd</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">key</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Return an index array for use in `scatter_nd` and `gather_nd`,</span>
<span class="sd"> and a list of positions of new_axes in ouptut shape.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">key_nd</span> <span class="o">=</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">idx</span> <span class="k">for</span> <span class="n">idx</span> <span class="ow">in</span> <span class="n">key</span> <span class="k">if</span> <span class="n">idx</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">key_nd</span><span class="p">)</span> <span class="o">&lt;</span> <span class="bp">self</span><span class="o">.</span><span class="n">ndim</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span>
<span class="s1">&#39;too few indices after normalization: expected `ndim` (</span><span class="si">{}</span><span class="s1">) &#39;</span>
<span class="s1">&#39;but got </span><span class="si">{}</span><span class="s1">. This is a bug, please report it!&#39;</span>
<span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">ndim</span><span class="p">,</span> <span class="nb">len</span><span class="p">(</span><span class="n">key_nd</span><span class="p">))</span>
<span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">key_nd</span><span class="p">)</span> <span class="o">&gt;</span> <span class="bp">self</span><span class="o">.</span><span class="n">ndim</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">IndexError</span><span class="p">(</span>
<span class="s1">&#39;too many indices (</span><span class="si">{}</span><span class="s1">) for array with </span><span class="si">{}</span><span class="s1"> dimensions&#39;</span>
<span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">key_nd</span><span class="p">),</span> <span class="bp">self</span><span class="o">.</span><span class="n">ndim</span><span class="p">)</span>
<span class="p">)</span>
<span class="n">ndim</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">key_nd</span><span class="p">)</span>
<span class="c1"># --- Preparation --- #</span>
<span class="c1"># - Make lists for bookkeeping of advanced indices &amp; axes</span>
<span class="c1"># - Drop trailing `slice(None)` entries in `key` for efficiency</span>
<span class="c1"># - Determine whether the advanced indices are adjacent in `key`</span>
<span class="c1"># - Depending on that, make index permutations to move around indices</span>
<span class="n">adv_axs</span> <span class="o">=</span> <span class="p">[</span><span class="n">ax</span> <span class="k">for</span> <span class="n">ax</span><span class="p">,</span> <span class="n">idx</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">key</span><span class="p">)</span> <span class="k">if</span> <span class="n">_is_advanced_index</span><span class="p">(</span><span class="n">idx</span><span class="p">)]</span>
<span class="n">adv_axs_nd</span> <span class="o">=</span> <span class="p">[</span><span class="n">ax</span> <span class="k">for</span> <span class="n">ax</span><span class="p">,</span> <span class="n">idx</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">key_nd</span><span class="p">)</span> <span class="k">if</span> <span class="n">_is_advanced_index</span><span class="p">(</span><span class="n">idx</span><span class="p">)]</span>
<span class="n">adv_idcs_are_adjacent</span> <span class="o">=</span> <span class="nb">bool</span><span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">all</span><span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">diff</span><span class="p">(</span><span class="n">adv_axs</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span><span class="p">))</span>
<span class="n">nonadv_axs_nd</span> <span class="o">=</span> <span class="p">[</span><span class="n">ax</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">ndim</span><span class="p">)</span> <span class="k">if</span> <span class="n">ax</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">adv_axs_nd</span><span class="p">]</span>
<span class="n">adv_idcs_nd</span> <span class="o">=</span> <span class="p">[</span><span class="n">key_nd</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="n">adv_axs_nd</span><span class="p">]</span>
<span class="n">idcs_short</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_drop_slice_none_at_end</span><span class="p">(</span><span class="n">key_nd</span><span class="p">)</span>
<span class="n">dropped_axs</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="nb">range</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">idcs_short</span><span class="p">),</span> <span class="n">ndim</span><span class="p">))</span>
<span class="k">if</span> <span class="n">adv_idcs_are_adjacent</span><span class="p">:</span>
<span class="c1"># The easy case: the advanced block can stay at its position, and no</span>
<span class="c1"># permutation needs to be done (identity permutation)</span>
<span class="n">axs_nd_permut</span> <span class="o">=</span> <span class="n">axs_nd_permut_inv</span> <span class="o">=</span> <span class="nb">tuple</span><span class="p">(</span><span class="nb">range</span><span class="p">(</span><span class="n">ndim</span><span class="p">))</span>
<span class="n">idcs_permut_short</span> <span class="o">=</span> <span class="n">idcs_short</span>
<span class="n">block_axs_nd</span> <span class="o">=</span> <span class="n">adv_axs_nd</span>
<span class="k">else</span><span class="p">:</span>
<span class="c1"># The more complicated case: during broadcasting, we need to use the</span>
<span class="c1"># indices in the *permuted* order, where the advanced block is</span>
<span class="c1"># at the beginning, while the final index for `gather_nd` is stacked</span>
<span class="c1"># in the *original* order, so that the association of index with</span>
<span class="c1"># array axis remains the same.</span>
<span class="c1"># This order is used for broadcasting: advanced block at the beginning</span>
<span class="n">idcs_permut_short</span> <span class="o">=</span> <span class="p">(</span>
<span class="n">adv_idcs_nd</span> <span class="o">+</span>
<span class="p">[</span><span class="n">key_nd</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">ndim</span><span class="p">)</span>
<span class="k">if</span> <span class="n">ax</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">adv_axs_nd</span> <span class="ow">and</span> <span class="n">ax</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">dropped_axs</span><span class="p">]</span>
<span class="p">)</span>
<span class="n">block_axs_nd</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="nb">range</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">adv_axs_nd</span><span class="p">)))</span>
<span class="n">axs_nd_permut</span> <span class="o">=</span> <span class="n">adv_axs_nd</span> <span class="o">+</span> <span class="n">nonadv_axs_nd</span>
<span class="n">axs_nd_permut_inv</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">argsort</span><span class="p">(</span><span class="n">axs_nd_permut</span><span class="p">))</span>
<span class="c1"># --- Conversion, broadcasting and index stacking --- #</span>
<span class="c1"># - Convert all indices in `key` to arrays: integers to 1-element arrays,</span>
<span class="c1"># `slice` objects to arrays with explicit indices</span>
<span class="c1"># - Reshape arrays for broadcasting according to their position in the</span>
<span class="c1"># *permuted* key</span>
<span class="c1"># - Broadcast and stack the indices in the *original* order</span>
<span class="n">shape_nd_permut</span> <span class="o">=</span> <span class="nb">tuple</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="n">axs_nd_permut</span><span class="p">)</span>
<span class="n">converted_idcs_short</span> <span class="o">=</span> <span class="p">[</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_advanced_index_to_array</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">ax_len</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">ctx</span><span class="p">)</span>
<span class="k">for</span> <span class="n">idx</span><span class="p">,</span> <span class="n">ax_len</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">idcs_permut_short</span><span class="p">,</span> <span class="n">shape_nd_permut</span><span class="p">)</span>
<span class="p">]</span>
<span class="n">bcast_idcs_permut_short</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_broadcast_advanced_indices</span><span class="p">(</span>
<span class="n">converted_idcs_short</span><span class="p">,</span> <span class="n">block_axes</span><span class="o">=</span><span class="n">block_axs_nd</span>
<span class="p">)</span>
<span class="c1"># Get the ndim of advanced indexing subspace</span>
<span class="n">converted_advanced_idcs</span> <span class="o">=</span> <span class="p">[</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_advanced_index_to_array</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">ax_len</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">ctx</span><span class="p">)</span>
<span class="k">for</span> <span class="n">idx</span><span class="p">,</span> <span class="n">ax_len</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">adv_idcs_nd</span><span class="p">,</span> <span class="p">[</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="n">adv_axs_nd</span><span class="p">])</span>
<span class="p">]</span>
<span class="n">bcast_advanced_shape</span> <span class="o">=</span> <span class="n">_broadcast_shapes</span><span class="p">(</span><span class="n">converted_advanced_idcs</span><span class="p">)</span>
<span class="c1"># Undo the permutation to restore the original order</span>
<span class="n">bcast_idcs_short</span> <span class="o">=</span> <span class="p">[</span>
<span class="n">bcast_idcs_permut_short</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span>
<span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="n">axs_nd_permut_inv</span>
<span class="k">if</span> <span class="n">axs_nd_permut</span><span class="p">[</span><span class="n">ax</span><span class="p">]</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">dropped_axs</span>
<span class="p">]</span>
<span class="c1"># Calculate where the newaxes are inserted after advanced indexing</span>
<span class="n">new_axes_positions</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_new_axes_after_advanced_indexing</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="n">adv_axs</span><span class="p">,</span>\
<span class="nb">len</span><span class="p">(</span><span class="n">bcast_advanced_shape</span><span class="p">),</span> <span class="n">adv_idcs_are_adjacent</span><span class="p">)</span>
<span class="c1"># if any array is numpy.ndarray, stack in numpy ndarray class.</span>
<span class="k">for</span> <span class="n">idcs</span> <span class="ow">in</span> <span class="n">bcast_idcs_short</span><span class="p">:</span>
<span class="k">if</span> <span class="nb">type</span><span class="p">(</span><span class="n">idcs</span><span class="p">)</span> <span class="o">!=</span> <span class="n">NDArray</span><span class="p">:</span> <span class="c1"># pylint: disable=unidiomatic-typecheck</span>
<span class="k">return</span> <span class="n">bcast_idcs_short</span><span class="p">,</span> <span class="n">new_axes_positions</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">stack</span><span class="p">(</span><span class="o">*</span><span class="n">bcast_idcs_short</span><span class="p">),</span> <span class="n">new_axes_positions</span>
<span class="k">def</span> <span class="nf">_set_nd_advanced_indexing</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">key</span><span class="p">,</span> <span class="n">value</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;This function is called by __setitem__ when key is an advanced index.&quot;&quot;&quot;</span>
<span class="n">indices</span><span class="p">,</span> <span class="n">new_axes</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_get_index_nd</span><span class="p">(</span><span class="n">key</span><span class="p">)</span>
<span class="n">vshape</span> <span class="o">=</span> <span class="n">get_oshape_of_gather_nd_op</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">indices</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span>
<span class="n">value_nd</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_prepare_value_nd</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">bcast_shape</span><span class="o">=</span><span class="n">vshape</span><span class="p">,</span> <span class="n">squeeze_axes</span><span class="o">=</span><span class="n">new_axes</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_scatter_set_nd</span><span class="p">(</span><span class="n">value_nd</span><span class="p">,</span> <span class="n">indices</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_get_nd_advanced_indexing</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">key</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Get item when key is a tuple of any objects of the following types:</span>
<span class="sd"> NDArray, np.ndarray, list, tuple, slice, and integer.&quot;&quot;&quot;</span>
<span class="n">slc_key</span><span class="p">,</span> <span class="n">new_axes</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_get_index_nd</span><span class="p">(</span><span class="n">key</span><span class="p">)</span>
<span class="n">sliced</span> <span class="o">=</span> <span class="n">op</span><span class="o">.</span><span class="n">gather_nd</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">slc_key</span><span class="p">)</span>
<span class="c1"># Reshape due to `None` entries in `key`.</span>
<span class="k">if</span> <span class="n">new_axes</span><span class="p">:</span>
<span class="n">final_shape</span> <span class="o">=</span> <span class="p">[</span><span class="n">sliced</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">i</span><span class="p">]</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">sliced</span><span class="o">.</span><span class="n">ndim</span><span class="p">)]</span>
<span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="n">new_axes</span><span class="p">:</span> <span class="c1"># pylint: disable=invalid-name</span>
<span class="n">final_shape</span><span class="o">.</span><span class="n">insert</span><span class="p">(</span><span class="n">ax</span><span class="p">,</span> <span class="mi">1</span><span class="p">)</span>
<span class="k">return</span> <span class="n">sliced</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">final_shape</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="n">sliced</span>
<span class="k">def</span> <span class="nf">_sync_copyfrom</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">source_array</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Performs a synchronized copy from the `source_array` to the current array.</span>
<span class="sd"> This is called through ``x[:] = source_array``, where the `source_array`</span>
<span class="sd"> is a `numpy.ndarray` or array-like object.</span>
<span class="sd"> This function blocks until all the pending read/write operations with respect</span>
<span class="sd"> to the current `NDArray` are finished and carry out the copy operation to the</span>
<span class="sd"> current NDArray.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> source_array : array_like</span>
<span class="sd"> The data source we would like to copy from.</span>
<span class="sd"> Example</span>
<span class="sd"> -------</span>
<span class="sd"> &gt;&gt;&gt; a = mx.nd.array([1, 2])</span>
<span class="sd"> &gt;&gt;&gt; a.asnumpy()</span>
<span class="sd"> array([ 1., 2.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; a[:] = np.array([3, 4])</span>
<span class="sd"> &gt;&gt; a.asnumpy()</span>
<span class="sd"> array([ 3., 4.], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">source_array</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">ndarray</span><span class="p">):</span>
<span class="k">try</span><span class="p">:</span>
<span class="n">source_array</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">(</span><span class="n">source_array</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span><span class="p">)</span>
<span class="k">except</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="s1">&#39;array must consist of array-like data,&#39;</span> <span class="o">+</span>
<span class="sa">f</span><span class="s1">&#39;type </span><span class="si">{</span><span class="nb">str</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">array</span><span class="p">))</span><span class="si">}</span><span class="s1"> is not supported&#39;</span><span class="p">)</span>
<span class="n">source_array</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">asarray</span><span class="p">(</span><span class="n">source_array</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span><span class="p">,</span> <span class="n">order</span><span class="o">=</span><span class="s1">&#39;C&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="n">source_array</span><span class="o">.</span><span class="n">shape</span> <span class="o">!=</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="sa">f</span><span class="s1">&#39;Shape inconsistent: expected </span><span class="si">{</span><span class="nb">str</span><span class="p">(</span><span class="n">source_array</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span><span class="si">}</span><span class="s1"> vs got </span><span class="si">{</span><span class="nb">str</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span><span class="si">}</span><span class="s1">&#39;</span><span class="p">)</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArraySyncCopyFromCPU</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span>
<span class="n">source_array</span><span class="o">.</span><span class="n">ctypes</span><span class="o">.</span><span class="n">data_as</span><span class="p">(</span><span class="n">ctypes</span><span class="o">.</span><span class="n">c_void_p</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_size_t</span><span class="p">(</span><span class="n">source_array</span><span class="o">.</span><span class="n">size</span><span class="p">)))</span>
<span class="k">def</span> <span class="nf">_slice</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a sliced NDArray that shares memory with the current one.</span>
<span class="sd"> This is called through ``x[start:stop]``.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> start : int</span>
<span class="sd"> Starting inclusive index of slice in the first dim.</span>
<span class="sd"> stop : int</span>
<span class="sd"> Finishing exclusive index of slice in the first dim.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> `NDArray` sharing the memory with the current one sliced from</span>
<span class="sd"> start to stop in the first dim.</span>
<span class="sd"> Examples:</span>
<span class="sd"> &gt;&gt;&gt; a = mx.nd.array([[1,2], [3, 4], [5, 6], [7, 8]])</span>
<span class="sd"> &gt;&gt;&gt; a[1:2].asnumpy()</span>
<span class="sd"> array([[ 3., 4.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; a[1:1].asnumpy()</span>
<span class="sd"> array([], shape=(0, 2), dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">handle</span> <span class="o">=</span> <span class="n">NDArrayHandle</span><span class="p">()</span>
<span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="p">,</span> <span class="n">_</span> <span class="o">=</span> <span class="n">_get_index_range</span><span class="p">(</span><span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">])</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArraySlice</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">mx_uint</span><span class="p">(</span><span class="n">start</span><span class="p">),</span> <span class="n">mx_uint</span><span class="p">(</span><span class="n">stop</span><span class="p">),</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">handle</span><span class="p">)))</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__class__</span><span class="p">(</span><span class="n">handle</span><span class="o">=</span><span class="n">handle</span><span class="p">,</span> <span class="n">writable</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">writable</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_at</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">idx</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a view of the array sliced at `idx` in the first dim.</span>
<span class="sd"> This is called through ``x[idx]``.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> idx : int</span>
<span class="sd"> index for slicing the `NDArray` in the first dim.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> `NDArray` sharing the memory with the current one sliced at `idx` in the first dim.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; a = mx.nd.array([[1,2], [3, 4]])</span>
<span class="sd"> &gt;&gt;&gt; a[1].asnumpy()</span>
<span class="sd"> array([ 3., 4.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; b = mx.nd.array([1, 2, 3, 4])</span>
<span class="sd"> &gt;&gt;&gt; b[0].asnumpy()</span>
<span class="sd"> array([ 1.], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">handle</span> <span class="o">=</span> <span class="n">NDArrayHandle</span><span class="p">()</span>
<span class="k">if</span> <span class="n">idx</span> <span class="o">&lt;</span> <span class="mi">0</span><span class="p">:</span>
<span class="n">length</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
<span class="n">idx</span> <span class="o">+=</span> <span class="n">length</span>
<span class="k">if</span> <span class="n">idx</span> <span class="o">&lt;</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">IndexError</span><span class="p">(</span><span class="sa">f</span><span class="s1">&#39;index </span><span class="si">{</span><span class="n">idx</span><span class="o">-</span><span class="n">length</span><span class="si">}</span><span class="s1"> is out of bounds for axis 0 with size </span><span class="si">{</span><span class="n">length</span><span class="si">}</span><span class="s1">&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="n">_int64_enabled</span><span class="p">():</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayAt64</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_int64</span><span class="p">(</span><span class="n">idx</span><span class="p">),</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">handle</span><span class="p">)))</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayAt</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_uint32</span><span class="p">(</span><span class="n">idx</span><span class="p">),</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">handle</span><span class="p">)))</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__class__</span><span class="p">(</span><span class="n">handle</span><span class="o">=</span><span class="n">handle</span><span class="p">,</span> <span class="n">writable</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">writable</span><span class="p">)</span>
<div class="viewcode-block" id="NDArray.reshape"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.reshape">[docs]</a> <span class="k">def</span> <span class="nf">reshape</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">shape</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a **view** of this array with a new shape without altering any data.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> shape : tuple of int, or n ints</span>
<span class="sd"> The new shape should not change the array size, namely</span>
<span class="sd"> ``np.prod(new_shape)`` should be equal to ``np.prod(self.shape)``.</span>
<span class="sd"> Some dimensions of the shape can take special values from the set {0, -1, -2, -3, -4}.</span>
<span class="sd"> The significance of each is explained below:</span>
<span class="sd"> - ``0`` copy this dimension from the input to the output shape.</span>
<span class="sd"> Example::</span>
<span class="sd"> - input shape = (2,3,4), shape = (4,0,2), output shape = (4,3,2)</span>
<span class="sd"> - input shape = (2,3,4), shape = (2,0,0), output shape = (2,3,4)</span>
<span class="sd"> - ``-1`` infers the dimension of the output shape by using the remainder of the</span>
<span class="sd"> input dimensions keeping the size of the new array same as that of the input array.</span>
<span class="sd"> At most one dimension of shape can be -1.</span>
<span class="sd"> Example::</span>
<span class="sd"> - input shape = (2,3,4), shape = (6,1,-1), output shape = (6,1,4)</span>
<span class="sd"> - input shape = (2,3,4), shape = (3,-1,8), output shape = (3,1,8)</span>
<span class="sd"> - input shape = (2,3,4), shape=(-1,), output shape = (24,)</span>
<span class="sd"> - ``-2`` copy all/remainder of the input dimensions to the output shape.</span>
<span class="sd"> Example::</span>
<span class="sd"> - input shape = (2,3,4), shape = (-2,), output shape = (2,3,4)</span>
<span class="sd"> - input shape = (2,3,4), shape = (2,-2), output shape = (2,3,4)</span>
<span class="sd"> - input shape = (2,3,4), shape = (-2,1,1), output shape = (2,3,4,1,1)</span>
<span class="sd"> - ``-3`` use the product of two consecutive dimensions of the input shape as the</span>
<span class="sd"> output dimension.</span>
<span class="sd"> Example::</span>
<span class="sd"> - input shape = (2,3,4), shape = (-3,4), output shape = (6,4)</span>
<span class="sd"> - input shape = (2,3,4,5), shape = (-3,-3), output shape = (6,20)</span>
<span class="sd"> - input shape = (2,3,4), shape = (0,-3), output shape = (2,12)</span>
<span class="sd"> - input shape = (2,3,4), shape = (-3,-2), output shape = (6,4)</span>
<span class="sd"> - ``-4`` split one dimension of the input into two dimensions passed subsequent to</span>
<span class="sd"> -4 in shape (can contain -1).</span>
<span class="sd"> Example::</span>
<span class="sd"> - input shape = (2,3,4), shape = (-4,1,2,-2), output shape =(1,2,3,4)</span>
<span class="sd"> - input shape = (2,3,4), shape = (2,-4,-1,3,-2), output shape = (2,1,3,4)</span>
<span class="sd"> - If the argument `reverse` is set to 1, then the special values are inferred from right</span>
<span class="sd"> to left.</span>
<span class="sd"> Example::</span>
<span class="sd"> - without reverse=1, for input shape = (10,5,4), shape = (-1,0), output shape would be \</span>
<span class="sd"> (40,5).</span>
<span class="sd"> - with reverse=1, output shape will be (50,4).</span>
<span class="sd"> reverse : bool, default False</span>
<span class="sd"> If true then the special values are inferred from right to left. Only supported as</span>
<span class="sd"> keyword argument.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> An array with desired shape that shares data with this array.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(0,6).reshape(2,3)</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 0., 1., 2.],</span>
<span class="sd"> [ 3., 4., 5.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y = x.reshape(3,2)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0., 1.],</span>
<span class="sd"> [ 2., 3.],</span>
<span class="sd"> [ 4., 5.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y = x.reshape(3,-1)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0., 1.],</span>
<span class="sd"> [ 2., 3.],</span>
<span class="sd"> [ 4., 5.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y = x.reshape(3,2)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0., 1.],</span>
<span class="sd"> [ 2., 3.],</span>
<span class="sd"> [ 4., 5.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y = x.reshape(-3)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([ 0. 1. 2. 3. 4. 5.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y[:] = -1</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[-1., -1., -1.],</span>
<span class="sd"> [-1., -1., -1.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">shape</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span> <span class="ow">and</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="p">(</span><span class="nb">list</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">)):</span>
<span class="n">shape</span> <span class="o">=</span> <span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
<span class="k">elif</span> <span class="ow">not</span> <span class="n">shape</span><span class="p">:</span>
<span class="n">shape</span> <span class="o">=</span> <span class="n">kwargs</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">&#39;shape&#39;</span><span class="p">)</span>
<span class="k">assert</span> <span class="n">shape</span><span class="p">,</span> <span class="s2">&quot;Shape must be provided.&quot;</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">all</span><span class="p">(</span><span class="n">k</span> <span class="ow">in</span> <span class="p">[</span><span class="s1">&#39;shape&#39;</span><span class="p">,</span> <span class="s1">&#39;reverse&#39;</span><span class="p">]</span> <span class="k">for</span> <span class="n">k</span> <span class="ow">in</span> <span class="n">kwargs</span><span class="p">):</span>
<span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span>
<span class="s2">&quot;Got unknown keywords in reshape: </span><span class="si">{}</span><span class="s2">. &quot;</span> \
<span class="s2">&quot;Accepted keyword arguments are &#39;shape&#39; and &#39;reverse&#39;.&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span>
<span class="s1">&#39;, &#39;</span><span class="o">.</span><span class="n">join</span><span class="p">([</span><span class="n">k</span> <span class="k">for</span> <span class="n">k</span> <span class="ow">in</span> <span class="n">kwargs</span> <span class="k">if</span> <span class="n">k</span> <span class="ow">not</span> <span class="ow">in</span> <span class="p">[</span><span class="s1">&#39;shape&#39;</span><span class="p">,</span> <span class="s1">&#39;reverse&#39;</span><span class="p">]])))</span>
<span class="n">reverse</span> <span class="o">=</span> <span class="n">kwargs</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">&#39;reverse&#39;</span><span class="p">,</span> <span class="kc">False</span><span class="p">)</span>
<span class="n">handle</span> <span class="o">=</span> <span class="n">NDArrayHandle</span><span class="p">()</span>
<span class="c1"># Actual reshape</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayReshape64</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span>
<span class="nb">len</span><span class="p">(</span><span class="n">shape</span><span class="p">),</span>
<span class="n">c_array</span><span class="p">(</span><span class="n">ctypes</span><span class="o">.</span><span class="n">c_int64</span><span class="p">,</span> <span class="n">shape</span><span class="p">),</span>
<span class="n">reverse</span><span class="p">,</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">handle</span><span class="p">)))</span>
<span class="n">res</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__class__</span><span class="p">(</span><span class="n">handle</span><span class="o">=</span><span class="n">handle</span><span class="p">,</span> <span class="n">writable</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">writable</span><span class="p">)</span>
<span class="c1"># Array size should not change</span>
<span class="k">if</span> <span class="n">np</span><span class="o">.</span><span class="n">prod</span><span class="p">(</span><span class="n">res</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span> <span class="o">!=</span> <span class="n">np</span><span class="o">.</span><span class="n">prod</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">):</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;Cannot reshape array of size </span><span class="si">{}</span><span class="s1"> into shape </span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">prod</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">),</span> <span class="n">shape</span><span class="p">))</span>
<span class="k">return</span> <span class="n">res</span></div>
<div class="viewcode-block" id="NDArray.reshape_like"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.reshape_like">[docs]</a> <span class="k">def</span> <span class="nf">reshape_like</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`reshape_like`.</span>
<span class="sd"> The arguments are the same as for :py:func:`reshape_like`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">reshape_like</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.zeros_like"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.zeros_like">[docs]</a> <span class="k">def</span> <span class="nf">zeros_like</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`zeros_like`.</span>
<span class="sd"> The arguments are the same as for :py:func:`zeros_like`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">zeros_like</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.ones_like"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.ones_like">[docs]</a> <span class="k">def</span> <span class="nf">ones_like</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`ones_like`.</span>
<span class="sd"> The arguments are the same as for :py:func:`ones_like`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">ones_like</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.broadcast_axes"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.broadcast_axes">[docs]</a> <span class="k">def</span> <span class="nf">broadcast_axes</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`broadcast_axes`.</span>
<span class="sd"> The arguments are the same as for :py:func:`broadcast_axes`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">broadcast_axes</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.repeat"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.repeat">[docs]</a> <span class="k">def</span> <span class="nf">repeat</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`repeat`.</span>
<span class="sd"> The arguments are the same as for :py:func:`repeat`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">repeat</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.pad"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.pad">[docs]</a> <span class="k">def</span> <span class="nf">pad</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`pad`.</span>
<span class="sd"> The arguments are the same as for :py:func:`pad`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">pad</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.swapaxes"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.swapaxes">[docs]</a> <span class="k">def</span> <span class="nf">swapaxes</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`swapaxes`.</span>
<span class="sd"> The arguments are the same as for :py:func:`swapaxes`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">swapaxes</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.split"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.split">[docs]</a> <span class="k">def</span> <span class="nf">split</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`split`.</span>
<span class="sd"> The arguments are the same as for :py:func:`split`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.split_v2"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.split_v2">[docs]</a> <span class="k">def</span> <span class="nf">split_v2</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`split_v2`.</span>
<span class="sd"> The arguments are the same as for :py:func:`split_v2`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">split_v2</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.slice"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.slice">[docs]</a> <span class="k">def</span> <span class="nf">slice</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`slice`.</span>
<span class="sd"> The arguments are the same as for :py:func:`slice`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">slice</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.slice_axis"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.slice_axis">[docs]</a> <span class="k">def</span> <span class="nf">slice_axis</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`slice_axis`.</span>
<span class="sd"> The arguments are the same as for :py:func:`slice_axis`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">slice_axis</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.slice_like"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.slice_like">[docs]</a> <span class="k">def</span> <span class="nf">slice_like</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`slice_like`.</span>
<span class="sd"> The arguments are the same as for :py:func:`slice_like`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">slice_like</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.take"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.take">[docs]</a> <span class="k">def</span> <span class="nf">take</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`take`.</span>
<span class="sd"> The arguments are the same as for :py:func:`take`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">take</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.one_hot"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.one_hot">[docs]</a> <span class="k">def</span> <span class="nf">one_hot</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`one_hot`.</span>
<span class="sd"> The arguments are the same as for :py:func:`one_hot`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">one_hot</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.pick"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.pick">[docs]</a> <span class="k">def</span> <span class="nf">pick</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`pick`.</span>
<span class="sd"> The arguments are the same as for :py:func:`pick`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">pick</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.sort"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.sort">[docs]</a> <span class="k">def</span> <span class="nf">sort</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`sort`.</span>
<span class="sd"> The arguments are the same as for :py:func:`sort`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">sort</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.topk"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.topk">[docs]</a> <span class="k">def</span> <span class="nf">topk</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`topk`.</span>
<span class="sd"> The arguments are the same as for :py:func:`topk`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">topk</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.argsort"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.argsort">[docs]</a> <span class="k">def</span> <span class="nf">argsort</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`argsort`.</span>
<span class="sd"> The arguments are the same as for :py:func:`argsort`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">argsort</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.argmax"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.argmax">[docs]</a> <span class="k">def</span> <span class="nf">argmax</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`argmax`.</span>
<span class="sd"> The arguments are the same as for :py:func:`argmax`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">argmax</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.argmax_channel"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.argmax_channel">[docs]</a> <span class="k">def</span> <span class="nf">argmax_channel</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`argmax_channel`.</span>
<span class="sd"> The arguments are the same as for :py:func:`argmax_channel`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">argmax_channel</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.argmin"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.argmin">[docs]</a> <span class="k">def</span> <span class="nf">argmin</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`argmin`.</span>
<span class="sd"> The arguments are the same as for :py:func:`argmin`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">argmin</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.clip"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.clip">[docs]</a> <span class="k">def</span> <span class="nf">clip</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`clip`.</span>
<span class="sd"> The arguments are the same as for :py:func:`clip`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">clip</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.abs"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.abs">[docs]</a> <span class="k">def</span> <span class="nf">abs</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`abs`.</span>
<span class="sd"> The arguments are the same as for :py:func:`abs`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">abs</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.sign"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.sign">[docs]</a> <span class="k">def</span> <span class="nf">sign</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`sign`.</span>
<span class="sd"> The arguments are the same as for :py:func:`sign`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">sign</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.flatten"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.flatten">[docs]</a> <span class="k">def</span> <span class="nf">flatten</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">inplace</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Flatten this array without altering any data.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> inplace : bool, default False</span>
<span class="sd"> If True, this method returns a **view** of this array</span>
<span class="sd"> that shares data with this array. Otherwise, a copy is returned.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> An array with flattened shape `(d1, d2*...*dk)` that shares data with</span>
<span class="sd"> this array with shape `(d1, d2, ..., dk)`.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(30).reshape(5,2,3)</span>
<span class="sd"> &gt;&gt;&gt; y = x.flatten(inplace=True)</span>
<span class="sd"> &gt;&gt;&gt; z = x.flatten()</span>
<span class="sd"> &gt;&gt;&gt; y.shape</span>
<span class="sd"> (5, 6)</span>
<span class="sd"> &gt;&gt;&gt; y[0].asnumpy()</span>
<span class="sd"> array([0., 1., 2., 3., 4., 5.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y[:] = -1</span>
<span class="sd"> &gt;&gt;&gt; x[0].asnumpy()</span>
<span class="sd"> array([[-1., -1., -1.],</span>
<span class="sd"> [-1., -1., -1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z[0].asnumpy()</span>
<span class="sd"> array([0., 1., 2., 3., 4., 5.], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">flatten</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span> <span class="k">if</span> <span class="ow">not</span> <span class="n">inplace</span> <span class="k">else</span> <span class="bp">self</span><span class="o">.</span><span class="n">reshape</span><span class="p">((</span><span class="mi">0</span><span class="p">,</span> <span class="o">-</span><span class="mi">1</span><span class="p">))</span></div>
<div class="viewcode-block" id="NDArray.shape_array"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.shape_array">[docs]</a> <span class="k">def</span> <span class="nf">shape_array</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`shape_array`.</span>
<span class="sd"> The arguments are the same as for :py:func:`shape_array`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">shape_array</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.size_array"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.size_array">[docs]</a> <span class="k">def</span> <span class="nf">size_array</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`size_array`.</span>
<span class="sd"> The arguments are the same as for :py:func:`size_array`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">size_array</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.expand_dims"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.expand_dims">[docs]</a> <span class="k">def</span> <span class="nf">expand_dims</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">axis</span><span class="p">,</span> <span class="n">inplace</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Adds an additional dimension to the current array without altering any data.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> axis : int</span>
<span class="sd"> Position where new axis is to be inserted.</span>
<span class="sd"> Suppose that the input NDArray&#39;s dimension is ndim,</span>
<span class="sd"> the range of the inserted axis is [-ndim, ndim].</span>
<span class="sd"> inplace : bool, default False</span>
<span class="sd"> If True, this method returns a **view** of this array</span>
<span class="sd"> that shares data with this array. Otherwise, a copy is returned.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> An array with expanded shape `(d1, d2, ..., 1, di, ..., dk)`</span>
<span class="sd"> that shares data with this array with shape `(d1, d2, ..., dk)`,</span>
<span class="sd"> given input axis `i`.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(6).reshape(2,3)</span>
<span class="sd"> &gt;&gt;&gt; y = x.expand_dims(1, inplace=True)</span>
<span class="sd"> &gt;&gt;&gt; z = x.expand_dims(1)</span>
<span class="sd"> &gt;&gt;&gt; y.shape</span>
<span class="sd"> (2, 1, 3)</span>
<span class="sd"> &gt;&gt;&gt; y[0].asnumpy()</span>
<span class="sd"> array([[0., 1., 2.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y[:] = -1</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[-1., -1., -1.],</span>
<span class="sd"> [-1., -1., -1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z[0].asnumpy()</span>
<span class="sd"> array([[0., 1., 2.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="ow">not</span> <span class="n">inplace</span><span class="p">:</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">expand_dims</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">axis</span><span class="o">=</span><span class="n">axis</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">new_shape</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span>
<span class="k">assert</span> <span class="o">-</span><span class="nb">len</span><span class="p">(</span><span class="n">new_shape</span><span class="p">)</span><span class="o">-</span><span class="mi">1</span> <span class="o">&lt;=</span> <span class="n">axis</span> <span class="o">&lt;=</span> <span class="nb">len</span><span class="p">(</span><span class="n">new_shape</span><span class="p">),</span> \
<span class="s2">&quot;axis </span><span class="si">{}</span><span class="s2"> is out of range for </span><span class="si">{}</span><span class="s2">d array&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">axis</span><span class="p">,</span> <span class="nb">len</span><span class="p">(</span><span class="n">new_shape</span><span class="p">))</span>
<span class="k">if</span> <span class="n">axis</span> <span class="o">&lt;</span> <span class="mi">0</span><span class="p">:</span>
<span class="n">axis</span> <span class="o">+=</span> <span class="nb">len</span><span class="p">(</span><span class="n">new_shape</span><span class="p">)</span> <span class="o">+</span> <span class="mi">1</span>
<span class="n">new_shape</span><span class="o">.</span><span class="n">insert</span><span class="p">(</span><span class="n">axis</span><span class="p">,</span> <span class="mi">1</span><span class="p">)</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">new_shape</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.tile"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.tile">[docs]</a> <span class="k">def</span> <span class="nf">tile</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`tile`.</span>
<span class="sd"> The arguments are the same as for :py:func:`tile`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">tile</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.transpose"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.transpose">[docs]</a> <span class="k">def</span> <span class="nf">transpose</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`transpose`.</span>
<span class="sd"> The arguments are the same as for :py:func:`transpose`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">transpose</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.flip"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.flip">[docs]</a> <span class="k">def</span> <span class="nf">flip</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`flip`.</span>
<span class="sd"> The arguments are the same as for :py:func:`flip`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">flip</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.depth_to_space"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.depth_to_space">[docs]</a> <span class="k">def</span> <span class="nf">depth_to_space</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`depth_to_space`.</span>
<span class="sd"> The arguments are the same as for :py:func:`depth_to_space`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">depth_to_space</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.space_to_depth"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.space_to_depth">[docs]</a> <span class="k">def</span> <span class="nf">space_to_depth</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`space_to_depth`.</span>
<span class="sd"> The arguments are the same as for :py:func:`space_to_depth`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">space_to_depth</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.diag"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.diag">[docs]</a> <span class="k">def</span> <span class="nf">diag</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">k</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`diag`.</span>
<span class="sd"> The arguments are the same as for :py:func:`diag`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">diag</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">k</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.sum"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.sum">[docs]</a> <span class="k">def</span> <span class="nf">sum</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`sum`.</span>
<span class="sd"> The arguments are the same as for :py:func:`sum`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">sum</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.nansum"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.nansum">[docs]</a> <span class="k">def</span> <span class="nf">nansum</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`nansum`.</span>
<span class="sd"> The arguments are the same as for :py:func:`nansum`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">nansum</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.prod"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.prod">[docs]</a> <span class="k">def</span> <span class="nf">prod</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`prod`.</span>
<span class="sd"> The arguments are the same as for :py:func:`prod`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">prod</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.nanprod"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.nanprod">[docs]</a> <span class="k">def</span> <span class="nf">nanprod</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`nanprod`.</span>
<span class="sd"> The arguments are the same as for :py:func:`nanprod`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">nanprod</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.mean"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.mean">[docs]</a> <span class="k">def</span> <span class="nf">mean</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`mean`.</span>
<span class="sd"> The arguments are the same as for :py:func:`mean`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">mean</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.max"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.max">[docs]</a> <span class="k">def</span> <span class="nf">max</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`max`.</span>
<span class="sd"> The arguments are the same as for :py:func:`max`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">max</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.min"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.min">[docs]</a> <span class="k">def</span> <span class="nf">min</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`min`.</span>
<span class="sd"> The arguments are the same as for :py:func:`min`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">min</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.norm"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.norm">[docs]</a> <span class="k">def</span> <span class="nf">norm</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`norm`.</span>
<span class="sd"> The arguments are the same as for :py:func:`norm`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">norm</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.round"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.round">[docs]</a> <span class="k">def</span> <span class="nf">round</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`round`.</span>
<span class="sd"> The arguments are the same as for :py:func:`round`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">round</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.rint"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.rint">[docs]</a> <span class="k">def</span> <span class="nf">rint</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`rint`.</span>
<span class="sd"> The arguments are the same as for :py:func:`rint`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">rint</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.fix"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.fix">[docs]</a> <span class="k">def</span> <span class="nf">fix</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`fix`.</span>
<span class="sd"> The arguments are the same as for :py:func:`fix`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">fix</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.floor"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.floor">[docs]</a> <span class="k">def</span> <span class="nf">floor</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`floor`.</span>
<span class="sd"> The arguments are the same as for :py:func:`floor`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">floor</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.ceil"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.ceil">[docs]</a> <span class="k">def</span> <span class="nf">ceil</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`ceil`.</span>
<span class="sd"> The arguments are the same as for :py:func:`ceil`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">ceil</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.trunc"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.trunc">[docs]</a> <span class="k">def</span> <span class="nf">trunc</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`trunc`.</span>
<span class="sd"> The arguments are the same as for :py:func:`trunc`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">trunc</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.sin"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.sin">[docs]</a> <span class="k">def</span> <span class="nf">sin</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`sin`.</span>
<span class="sd"> The arguments are the same as for :py:func:`sin`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">sin</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.cos"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.cos">[docs]</a> <span class="k">def</span> <span class="nf">cos</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`cos`.</span>
<span class="sd"> The arguments are the same as for :py:func:`cos`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">cos</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.tan"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.tan">[docs]</a> <span class="k">def</span> <span class="nf">tan</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`tan`.</span>
<span class="sd"> The arguments are the same as for :py:func:`tan`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">tan</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.arcsin"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.arcsin">[docs]</a> <span class="k">def</span> <span class="nf">arcsin</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`arcsin`.</span>
<span class="sd"> The arguments are the same as for :py:func:`arcsin`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">arcsin</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.arccos"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.arccos">[docs]</a> <span class="k">def</span> <span class="nf">arccos</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`arccos`.</span>
<span class="sd"> The arguments are the same as for :py:func:`arccos`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">arccos</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.arctan"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.arctan">[docs]</a> <span class="k">def</span> <span class="nf">arctan</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`arctan`.</span>
<span class="sd"> The arguments are the same as for :py:func:`arctan`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">arctan</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.degrees"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.degrees">[docs]</a> <span class="k">def</span> <span class="nf">degrees</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`degrees`.</span>
<span class="sd"> The arguments are the same as for :py:func:`degrees`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">degrees</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.radians"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.radians">[docs]</a> <span class="k">def</span> <span class="nf">radians</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`radians`.</span>
<span class="sd"> The arguments are the same as for :py:func:`radians`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">radians</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.sinh"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.sinh">[docs]</a> <span class="k">def</span> <span class="nf">sinh</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`sinh`.</span>
<span class="sd"> The arguments are the same as for :py:func:`sinh`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">sinh</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.cosh"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.cosh">[docs]</a> <span class="k">def</span> <span class="nf">cosh</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`cosh`.</span>
<span class="sd"> The arguments are the same as for :py:func:`cosh`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">cosh</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.tanh"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.tanh">[docs]</a> <span class="k">def</span> <span class="nf">tanh</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`tanh`.</span>
<span class="sd"> The arguments are the same as for :py:func:`tanh`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">tanh</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.arcsinh"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.arcsinh">[docs]</a> <span class="k">def</span> <span class="nf">arcsinh</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`arcsinh`.</span>
<span class="sd"> The arguments are the same as for :py:func:`arcsinh`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">arcsinh</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.arccosh"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.arccosh">[docs]</a> <span class="k">def</span> <span class="nf">arccosh</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`arccosh`.</span>
<span class="sd"> The arguments are the same as for :py:func:`arccosh`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">arccosh</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.arctanh"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.arctanh">[docs]</a> <span class="k">def</span> <span class="nf">arctanh</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`arctanh`.</span>
<span class="sd"> The arguments are the same as for :py:func:`arctanh`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">arctanh</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.exp"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.exp">[docs]</a> <span class="k">def</span> <span class="nf">exp</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`exp`.</span>
<span class="sd"> The arguments are the same as for :py:func:`exp`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">exp</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.expm1"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.expm1">[docs]</a> <span class="k">def</span> <span class="nf">expm1</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`expm1`.</span>
<span class="sd"> The arguments are the same as for :py:func:`expm1`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">expm1</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.log"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.log">[docs]</a> <span class="k">def</span> <span class="nf">log</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`log`.</span>
<span class="sd"> The arguments are the same as for :py:func:`log`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">log</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.log10"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.log10">[docs]</a> <span class="k">def</span> <span class="nf">log10</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`log10`.</span>
<span class="sd"> The arguments are the same as for :py:func:`log10`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">log10</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.log2"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.log2">[docs]</a> <span class="k">def</span> <span class="nf">log2</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`log2`.</span>
<span class="sd"> The arguments are the same as for :py:func:`log2`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">log2</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.log1p"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.log1p">[docs]</a> <span class="k">def</span> <span class="nf">log1p</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`log1p`.</span>
<span class="sd"> The arguments are the same as for :py:func:`log1p`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">log1p</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.log_sigmoid"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.log_sigmoid">[docs]</a> <span class="k">def</span> <span class="nf">log_sigmoid</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`log_sigmoid`.</span>
<span class="sd"> The arguments are the same as for :py:func:`log_sigmoid`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">log_sigmoid</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.sqrt"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.sqrt">[docs]</a> <span class="k">def</span> <span class="nf">sqrt</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`sqrt`.</span>
<span class="sd"> The arguments are the same as for :py:func:`sqrt`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">sqrt</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.rsqrt"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.rsqrt">[docs]</a> <span class="k">def</span> <span class="nf">rsqrt</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`rsqrt`.</span>
<span class="sd"> The arguments are the same as for :py:func:`rsqrt`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">rsqrt</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.cbrt"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.cbrt">[docs]</a> <span class="k">def</span> <span class="nf">cbrt</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`cbrt`.</span>
<span class="sd"> The arguments are the same as for :py:func:`cbrt`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">cbrt</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.rcbrt"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.rcbrt">[docs]</a> <span class="k">def</span> <span class="nf">rcbrt</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`rcbrt`.</span>
<span class="sd"> The arguments are the same as for :py:func:`rcbrt`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">rcbrt</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.square"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.square">[docs]</a> <span class="k">def</span> <span class="nf">square</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`square`.</span>
<span class="sd"> The arguments are the same as for :py:func:`square`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">square</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.reciprocal"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.reciprocal">[docs]</a> <span class="k">def</span> <span class="nf">reciprocal</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`reciprocal`.</span>
<span class="sd"> The arguments are the same as for :py:func:`reciprocal`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">reciprocal</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.relu"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.relu">[docs]</a> <span class="k">def</span> <span class="nf">relu</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`relu`.</span>
<span class="sd"> The arguments are the same as for :py:func:`relu`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">relu</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.sigmoid"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.sigmoid">[docs]</a> <span class="k">def</span> <span class="nf">sigmoid</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`sigmoid`.</span>
<span class="sd"> The arguments are the same as for :py:func:`sigmoid`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">sigmoid</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.softmax"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.softmax">[docs]</a> <span class="k">def</span> <span class="nf">softmax</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`softmax`.</span>
<span class="sd"> The arguments are the same as for :py:func:`softmax`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">softmax</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.log_softmax"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.log_softmax">[docs]</a> <span class="k">def</span> <span class="nf">log_softmax</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`log_softmax`.</span>
<span class="sd"> The arguments are the same as for :py:func:`log_softmax`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">log_softmax</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.softmin"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.softmin">[docs]</a> <span class="k">def</span> <span class="nf">softmin</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`softmin`.</span>
<span class="sd"> The arguments are the same as for :py:func:`softmin`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">softmin</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.mish"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.mish">[docs]</a> <span class="k">def</span> <span class="nf">mish</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Convenience fluent method for :py:func:`mish`.</span>
<span class="sd"> The arguments are the same as for :py:func:`mish`, with</span>
<span class="sd"> this array as data.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">mish</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.squeeze"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.squeeze">[docs]</a> <span class="k">def</span> <span class="nf">squeeze</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">axis</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">inplace</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Remove dimensions with size 1 from this array without altering any data.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> axis : int, tuple of int, or None</span>
<span class="sd"> Selects a subset of the single-dimensional entries in the shape.</span>
<span class="sd"> If an axis is selected with shape entry greater than one, an error is raised.</span>
<span class="sd"> inplace : bool, default False</span>
<span class="sd"> If True, this method returns a **view** of this array</span>
<span class="sd"> that shares data with this array. Otherwise, a copy is returned.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="ow">not</span> <span class="n">inplace</span><span class="p">:</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">squeeze</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">axis</span><span class="o">=</span><span class="n">axis</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">new_shape</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span>
<span class="n">axes</span> <span class="o">=</span> <span class="n">axis</span> <span class="c1"># rename variable for readability</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">axes</span><span class="p">,</span> <span class="nb">int</span><span class="p">):</span>
<span class="n">axes</span> <span class="o">=</span> <span class="p">[</span><span class="n">axes</span><span class="p">]</span>
<span class="k">if</span> <span class="n">axes</span><span class="p">:</span>
<span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="n">axes</span><span class="p">)</span> <span class="o">==</span> <span class="nb">len</span><span class="p">(</span><span class="nb">set</span><span class="p">(</span><span class="n">axes</span><span class="p">)),</span> \
<span class="s2">&quot;axis </span><span class="si">{}</span><span class="s2"> contains duplicate which is not allowed.&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">axes</span><span class="p">)</span>
<span class="n">resolved_axes</span> <span class="o">=</span> <span class="p">[</span><span class="n">i</span> <span class="k">if</span> <span class="n">i</span> <span class="o">&gt;=</span> <span class="mi">0</span> <span class="k">else</span> <span class="n">i</span><span class="o">+</span><span class="nb">len</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="n">axes</span><span class="p">]</span>
<span class="k">for</span> <span class="n">arg_axis</span><span class="p">,</span> <span class="n">actual_axis</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">axes</span><span class="p">,</span> <span class="n">resolved_axes</span><span class="p">):</span>
<span class="k">assert</span> <span class="o">-</span><span class="nb">len</span><span class="p">(</span><span class="n">new_shape</span><span class="p">)</span> <span class="o">&lt;=</span> <span class="n">arg_axis</span> <span class="o">&lt;</span> <span class="nb">len</span><span class="p">(</span><span class="n">new_shape</span><span class="p">),</span> \
<span class="s2">&quot;axis </span><span class="si">{}</span><span class="s2"> is out of range for </span><span class="si">{}</span><span class="s2">d array&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">arg_axis</span><span class="p">,</span> <span class="nb">len</span><span class="p">(</span><span class="n">new_shape</span><span class="p">))</span>
<span class="n">axis_size</span> <span class="o">=</span> <span class="n">new_shape</span><span class="p">[</span><span class="n">actual_axis</span><span class="p">]</span>
<span class="k">assert</span> <span class="n">axis_size</span> <span class="o">==</span> <span class="mi">1</span><span class="p">,</span> \
<span class="s2">&quot;Squeeze target axis </span><span class="si">{}</span><span class="s2"> must be size 1, got </span><span class="si">{}</span><span class="s2">.&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">arg_axis</span><span class="p">,</span> <span class="n">axis_size</span><span class="p">)</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">sorted</span><span class="p">(</span><span class="n">resolved_axes</span><span class="p">,</span> <span class="n">reverse</span><span class="o">=</span><span class="kc">True</span><span class="p">):</span>
<span class="k">del</span> <span class="n">new_shape</span><span class="p">[</span><span class="n">i</span><span class="p">]</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">reversed</span><span class="p">(</span><span class="nb">range</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">new_shape</span><span class="p">))):</span>
<span class="k">if</span> <span class="n">new_shape</span><span class="p">[</span><span class="n">i</span><span class="p">]</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="k">del</span> <span class="n">new_shape</span><span class="p">[</span><span class="n">i</span><span class="p">]</span>
<span class="k">if</span> <span class="ow">not</span> <span class="n">new_shape</span><span class="p">:</span>
<span class="n">new_shape</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">new_shape</span><span class="p">)</span></div>
<span class="c1"># pylint: disable= undefined-variable</span>
<div class="viewcode-block" id="NDArray.broadcast_to"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.broadcast_to">[docs]</a> <span class="k">def</span> <span class="nf">broadcast_to</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">shape</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Broadcasts the input array to a new shape.</span>
<span class="sd"> Broadcasting is only allowed on axes with size 1. The new shape cannot change</span>
<span class="sd"> the number of dimensions.</span>
<span class="sd"> For example, you could broadcast from shape (2, 1) to (2, 3), but not from</span>
<span class="sd"> shape (2, 3) to (2, 3, 3).</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> shape : tuple of int</span>
<span class="sd"> The shape of the desired array.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> A NDArray with the desired shape that is not sharing data with this</span>
<span class="sd"> array, even if the new shape is the same as ``self.shape``.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(0,3).reshape((1,3,1))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[[ 0.],</span>
<span class="sd"> [ 1.],</span>
<span class="sd"> [ 2.]]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y = x.broadcast_to((2,3,3))</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[[ 0., 0., 0.],</span>
<span class="sd"> [ 1., 1., 1.],</span>
<span class="sd"> [ 2., 2., 2.]],</span>
<span class="sd"> &lt;BLANKLINE&gt;</span>
<span class="sd"> [[ 0., 0., 0.],</span>
<span class="sd"> [ 1., 1., 1.],</span>
<span class="sd"> [ 2., 2., 2.]]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">cur_shape</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span>
<span class="n">err_str</span> <span class="o">=</span> <span class="s1">&#39;operands could not be broadcast together with remapped shapes&#39;</span> \
<span class="s1">&#39;[original-&gt;remapped]: </span><span class="si">{}</span><span class="s1"> and requested shape </span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">cur_shape</span><span class="p">,</span> <span class="n">shape</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">shape</span><span class="p">)</span> <span class="o">&lt;</span> <span class="nb">len</span><span class="p">(</span><span class="n">cur_shape</span><span class="p">):</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="n">err_str</span><span class="p">)</span>
<span class="n">cur_shape</span> <span class="o">=</span> <span class="p">(</span><span class="mi">1</span><span class="p">,)</span> <span class="o">*</span> <span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">shape</span><span class="p">)</span> <span class="o">-</span> <span class="nb">len</span><span class="p">(</span><span class="n">cur_shape</span><span class="p">))</span> <span class="o">+</span> <span class="n">cur_shape</span>
<span class="n">cur_shape_arr</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">(</span><span class="n">cur_shape</span><span class="p">)</span>
<span class="n">broadcasting_axes</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">nonzero</span><span class="p">(</span><span class="n">cur_shape_arr</span> <span class="o">!=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">(</span><span class="n">shape</span><span class="p">))</span>
<span class="k">if</span> <span class="p">(</span><span class="n">cur_shape_arr</span><span class="p">[</span><span class="n">broadcasting_axes</span><span class="p">]</span> <span class="o">!=</span> <span class="mi">1</span><span class="p">)</span><span class="o">.</span><span class="n">any</span><span class="p">():</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="n">err_str</span><span class="p">)</span>
<span class="k">if</span> <span class="n">cur_shape</span> <span class="o">!=</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">:</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">broadcast_to</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">cur_shape</span><span class="p">),</span> <span class="n">shape</span><span class="o">=</span><span class="n">shape</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">broadcast_to</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">shape</span><span class="o">=</span><span class="nb">tuple</span><span class="p">(</span><span class="n">shape</span><span class="p">))</span></div>
<span class="c1"># pylint: enable= undefined-variable</span>
<div class="viewcode-block" id="NDArray.broadcast_like"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.broadcast_like">[docs]</a> <span class="k">def</span> <span class="nf">broadcast_like</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Broadcasts the input array to the shape of other.</span>
<span class="sd"> Broadcasting is only allowed on axes with size 1. The new shape cannot change</span>
<span class="sd"> the number of dimensions.</span>
<span class="sd"> For example, you could broadcast from shape (2, 1) to (2, 3), but not from</span>
<span class="sd"> shape (2, 3) to (2, 3, 3).</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> other : NDArray</span>
<span class="sd"> Array with shape of the desired array.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> A NDArray with the desired shape that is not sharing data with this</span>
<span class="sd"> array, even if the new shape is the same as ``self.shape``.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(0,3).reshape((1,3,1))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[[ 0.],</span>
<span class="sd"> [ 1.],</span>
<span class="sd"> [ 2.]]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y = x.broadcast_like(mx.nd.ones((2,3,3)))</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[[ 0., 0., 0.],</span>
<span class="sd"> [ 1., 1., 1.],</span>
<span class="sd"> [ 2., 2., 2.]],</span>
<span class="sd"> &lt;BLANKLINE&gt;</span>
<span class="sd"> [[ 0., 0., 0.],</span>
<span class="sd"> [ 1., 1., 1.],</span>
<span class="sd"> [ 2., 2., 2.]]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">broadcast_to</span><span class="p">(</span><span class="n">other</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.wait_to_read"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.wait_to_read">[docs]</a> <span class="k">def</span> <span class="nf">wait_to_read</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Waits until all previous write operations on the current array are finished.</span>
<span class="sd"> This method guarantees that all previous write operations that pushed</span>
<span class="sd"> into the backend engine for execution are actually finished.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; import time</span>
<span class="sd"> &gt;&gt;&gt; tic = time.time()</span>
<span class="sd"> &gt;&gt;&gt; a = mx.nd.ones((1000,1000))</span>
<span class="sd"> &gt;&gt;&gt; b = mx.nd.dot(a, a)</span>
<span class="sd"> &gt;&gt;&gt; print(time.time() - tic) # doctest: +SKIP</span>
<span class="sd"> 0.003854036331176758</span>
<span class="sd"> &gt;&gt;&gt; b.wait_to_read()</span>
<span class="sd"> &gt;&gt;&gt; print(time.time() - tic) # doctest: +SKIP</span>
<span class="sd"> 0.0893700122833252</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayWaitToRead</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">))</span></div>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">ndim</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns the number of dimensions of this array</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.array([1, 2, 3, 4])</span>
<span class="sd"> &gt;&gt;&gt; x.ndim</span>
<span class="sd"> 1</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.array([[1, 2], [3, 4]])</span>
<span class="sd"> &gt;&gt;&gt; x.ndim</span>
<span class="sd"> 2</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="nb">len</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">shape</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Tuple of array dimensions.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.array([1, 2, 3, 4])</span>
<span class="sd"> &gt;&gt;&gt; x.shape</span>
<span class="sd"> (4L,)</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.zeros((2, 3, 4))</span>
<span class="sd"> &gt;&gt;&gt; y.shape</span>
<span class="sd"> (2L, 3L, 4L)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">ndim</span> <span class="o">=</span> <span class="n">mx_int</span><span class="p">()</span>
<span class="k">if</span> <span class="n">_int64_enabled</span><span class="p">():</span>
<span class="n">pdata</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">POINTER</span><span class="p">(</span><span class="n">mx_int64</span><span class="p">)()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayGetShape64</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">ndim</span><span class="p">),</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">pdata</span><span class="p">)))</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">pdata</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">POINTER</span><span class="p">(</span><span class="n">mx_int</span><span class="p">)()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayGetShape</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">ndim</span><span class="p">),</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">pdata</span><span class="p">)))</span>
<span class="k">if</span> <span class="n">ndim</span><span class="o">.</span><span class="n">value</span> <span class="o">==</span> <span class="o">-</span><span class="mi">1</span><span class="p">:</span>
<span class="k">return</span> <span class="kc">None</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">pdata</span><span class="p">[:</span><span class="n">ndim</span><span class="o">.</span><span class="n">value</span><span class="p">])</span> <span class="c1"># pylint: disable=invalid-slice-index</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">size</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Number of elements in the array.</span>
<span class="sd"> Equivalent to the product of the array&#39;s dimensions.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; import numpy as np</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.zeros((3, 5, 2))</span>
<span class="sd"> &gt;&gt;&gt; x.size</span>
<span class="sd"> 30</span>
<span class="sd"> &gt;&gt;&gt; np.prod(x.shape)</span>
<span class="sd"> 30</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">size</span> <span class="o">=</span> <span class="mi">1</span>
<span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">:</span>
<span class="n">size</span> <span class="o">*=</span> <span class="n">i</span>
<span class="k">return</span> <span class="n">size</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">context</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Device context of the array.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.array([1, 2, 3, 4])</span>
<span class="sd"> &gt;&gt;&gt; x.context</span>
<span class="sd"> cpu(0)</span>
<span class="sd"> &gt;&gt;&gt; type(x.context)</span>
<span class="sd"> &lt;class &#39;mxnet.device.Device&#39;&gt;</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.zeros((2,3), mx.gpu(0))</span>
<span class="sd"> &gt;&gt;&gt; y.context</span>
<span class="sd"> gpu(0)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">dev_typeid</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">()</span>
<span class="n">dev_id</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayGetContext</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">dev_typeid</span><span class="p">),</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">dev_id</span><span class="p">)))</span>
<span class="k">return</span> <span class="n">Device</span><span class="p">(</span><span class="n">Device</span><span class="o">.</span><span class="n">devtype2str</span><span class="p">[</span><span class="n">dev_typeid</span><span class="o">.</span><span class="n">value</span><span class="p">],</span> <span class="n">dev_id</span><span class="o">.</span><span class="n">value</span><span class="p">)</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">ctx</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Device context of the array. Has the same meaning as context.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.array([1, 2, 3, 4])</span>
<span class="sd"> &gt;&gt;&gt; x.ctx</span>
<span class="sd"> cpu(0)</span>
<span class="sd"> &gt;&gt;&gt; type(x.ctx)</span>
<span class="sd"> &lt;class &#39;mxnet.context.Context&#39;&gt;</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.zeros((2,3), mx.gpu(0))</span>
<span class="sd"> &gt;&gt;&gt; y.ctx</span>
<span class="sd"> gpu(0)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">context</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">device</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Device context of the array. Has the same meaning as context.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.array([1, 2, 3, 4])</span>
<span class="sd"> &gt;&gt;&gt; x.device</span>
<span class="sd"> cpu(0)</span>
<span class="sd"> &gt;&gt;&gt; type(x.device)</span>
<span class="sd"> &lt;class &#39;mxnet.device.Device&#39;&gt;</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.zeros((2,3), mx.gpu(0))</span>
<span class="sd"> &gt;&gt;&gt; y.device</span>
<span class="sd"> gpu(0)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">context</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">dtype</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Data-type of the array&#39;s elements.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> numpy.dtype</span>
<span class="sd"> This NDArray&#39;s data type.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.zeros((2,3))</span>
<span class="sd"> &gt;&gt;&gt; x.dtype</span>
<span class="sd"> &lt;type &#39;numpy.float32&#39;&gt;</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.zeros((2,3), dtype=&#39;int32&#39;)</span>
<span class="sd"> &gt;&gt;&gt; y.dtype</span>
<span class="sd"> &lt;type &#39;numpy.int32&#39;&gt;</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">mx_dtype</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayGetDType</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">mx_dtype</span><span class="p">)))</span>
<span class="k">return</span> <span class="n">dtype_mx_to_np</span><span class="p">(</span><span class="n">mx_dtype</span><span class="o">.</span><span class="n">value</span><span class="p">)</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">stype</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Storage-type of the array.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">_STORAGE_TYPE_ID_TO_STR</span><span class="p">[</span><span class="n">_storage_type</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">)]</span>
<span class="nd">@property</span>
<span class="c1"># pylint: disable= invalid-name, undefined-variable</span>
<span class="k">def</span> <span class="nf">T</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a copy of the array with axes transposed.</span>
<span class="sd"> Equivalent to ``mx.nd.transpose(self)`` except that</span>
<span class="sd"> self is returned if ``self.ndim &lt; 2``.</span>
<span class="sd"> Unlike ``numpy.ndarray.T``, this function returns a copy</span>
<span class="sd"> rather than a view of the array unless ``self.ndim &lt; 2``.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.arange(0,6).reshape((2,3))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 0., 1., 2.],</span>
<span class="sd"> [ 3., 4., 5.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x.T.asnumpy()</span>
<span class="sd"> array([[ 0., 3.],</span>
<span class="sd"> [ 1., 4.],</span>
<span class="sd"> [ 2., 5.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span> <span class="o">&lt;</span> <span class="mi">2</span><span class="p">:</span>
<span class="k">return</span> <span class="bp">self</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">transpose</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span>
<span class="c1"># pylint: enable= invalid-name, undefined-variable</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">_fresh_grad</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Whether this array&#39;s corresponding gradient array</span>
<span class="sd"> (registered via `autograd.mark_variables`) has been</span>
<span class="sd"> updated by `autograd.backward` since last reset.</span>
<span class="sd"> `_fresh_grad` need to be manually set to False</span>
<span class="sd"> after consuming gradient (usually after updating this</span>
<span class="sd"> array).</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">out</span> <span class="o">=</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayGetGradState</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">out</span><span class="p">)))</span>
<span class="k">return</span> <span class="n">out</span><span class="o">.</span><span class="n">value</span>
<span class="nd">@_fresh_grad</span><span class="o">.</span><span class="n">setter</span>
<span class="k">def</span> <span class="nf">_fresh_grad</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">state</span><span class="p">):</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArraySetGradState</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="n">state</span><span class="p">)))</span>
<div class="viewcode-block" id="NDArray.asnumpy"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.asnumpy">[docs]</a> <span class="k">def</span> <span class="nf">asnumpy</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a ``numpy.ndarray`` object with value copied from this array.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = x.asnumpy()</span>
<span class="sd"> &gt;&gt;&gt; type(y)</span>
<span class="sd"> &lt;type &#39;numpy.ndarray&#39;&gt;</span>
<span class="sd"> &gt;&gt;&gt; y</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.ones((2,3), dtype=&#39;int32&#39;)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[1, 1, 1],</span>
<span class="sd"> [1, 1, 1]], dtype=int32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">dtype</span> <span class="o">==</span> <span class="n">bfloat16</span><span class="p">:</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="n">np</span><span class="o">.</span><span class="n">float32</span><span class="p">)</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="n">data</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">empty</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span><span class="p">)</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArraySyncCopyToCPU</span><span class="p">(</span>
<span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span>
<span class="n">data</span><span class="o">.</span><span class="n">ctypes</span><span class="o">.</span><span class="n">data_as</span><span class="p">(</span><span class="n">ctypes</span><span class="o">.</span><span class="n">c_void_p</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_size_t</span><span class="p">(</span><span class="n">data</span><span class="o">.</span><span class="n">size</span><span class="p">)))</span>
<span class="k">return</span> <span class="n">data</span></div>
<div class="viewcode-block" id="NDArray.asscalar"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.asscalar">[docs]</a> <span class="k">def</span> <span class="nf">asscalar</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a scalar whose value is copied from this array.</span>
<span class="sd"> This function is equivalent to ``self.asnumpy()[0]``. This NDArray must have shape (1,).</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((1,), dtype=&#39;int32&#39;)</span>
<span class="sd"> &gt;&gt;&gt; x.asscalar()</span>
<span class="sd"> 1</span>
<span class="sd"> &gt;&gt;&gt; type(x.asscalar())</span>
<span class="sd"> &lt;type &#39;numpy.int32&#39;&gt;</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">size</span> <span class="o">!=</span> <span class="mi">1</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;The current array is not a scalar&quot;</span><span class="p">)</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">ndim</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()[</span><span class="mi">0</span><span class="p">]</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()[()]</span></div>
<div class="viewcode-block" id="NDArray.astype"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.astype">[docs]</a> <span class="k">def</span> <span class="nf">astype</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">dtype</span><span class="p">,</span> <span class="n">copy</span><span class="o">=</span><span class="kc">True</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a copy of the array after casting to a specified type.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> dtype : numpy.dtype or str</span>
<span class="sd"> The type of the returned array.</span>
<span class="sd"> copy : bool</span>
<span class="sd"> Default `True`. By default, astype always returns a newly</span>
<span class="sd"> allocated ndarray on the same context. If this is set to</span>
<span class="sd"> `False`, and the dtype requested is the same as the ndarray&#39;s</span>
<span class="sd"> dtype, the ndarray is returned instead of a copy.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray, CSRNDArray or RowSparseNDArray</span>
<span class="sd"> The copied array after casting to the specified type, or</span>
<span class="sd"> the same array if copy=False and dtype is the same as the input</span>
<span class="sd"> array.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.zeros((2,3), dtype=&#39;float32&#39;)</span>
<span class="sd"> &gt;&gt;&gt; y = x.astype(&#39;int32&#39;)</span>
<span class="sd"> &gt;&gt;&gt; y.dtype</span>
<span class="sd"> &lt;type &#39;numpy.int32&#39;&gt;</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="n">dtype</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">dtype</span> <span class="o">=</span> <span class="n">mx_real_t</span>
<span class="k">if</span> <span class="ow">not</span> <span class="n">copy</span> <span class="ow">and</span> <span class="n">np</span><span class="o">.</span><span class="n">dtype</span><span class="p">(</span><span class="n">dtype</span><span class="p">)</span> <span class="o">==</span> <span class="bp">self</span><span class="o">.</span><span class="n">dtype</span><span class="p">:</span>
<span class="k">return</span> <span class="bp">self</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">cast</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">dtype</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.copyto"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.copyto">[docs]</a> <span class="k">def</span> <span class="nf">copyto</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">other</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Copies the value of this array to another array.</span>
<span class="sd"> If ``other`` is a ``NDArray`` object, then ``other.shape`` and</span>
<span class="sd"> ``self.shape`` should be the same. This function copies the value from</span>
<span class="sd"> ``self`` to ``other``.</span>
<span class="sd"> If ``other`` is a context, a new ``NDArray`` will be first created on</span>
<span class="sd"> the target context, and the value of ``self`` is copied.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> other : NDArray or Context</span>
<span class="sd"> The destination array or context.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray, CSRNDArray or RowSparseNDArray</span>
<span class="sd"> The copied array. If ``other`` is an ``NDArray``, then the return value</span>
<span class="sd"> and ``other`` will point to the same ``NDArray``.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.zeros((2,3), mx.gpu(0))</span>
<span class="sd"> &gt;&gt;&gt; z = x.copyto(y)</span>
<span class="sd"> &gt;&gt;&gt; z is y</span>
<span class="sd"> True</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.copyto(mx.gpu(0))</span>
<span class="sd"> &lt;NDArray 2x3 @gpu(0)&gt;</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">NDArray</span><span class="p">):</span>
<span class="k">if</span> <span class="n">other</span><span class="o">.</span><span class="n">handle</span> <span class="ow">is</span> <span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">:</span>
<span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">&#39;You are attempting to copy an array to itself&#39;</span><span class="p">,</span> <span class="ne">RuntimeWarning</span><span class="p">)</span>
<span class="k">return</span> <span class="kc">False</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_copyto</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="n">other</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">other</span><span class="p">,</span> <span class="n">Device</span><span class="p">):</span>
<span class="n">hret</span> <span class="o">=</span> <span class="n">NDArray</span><span class="p">(</span><span class="n">_new_alloc_handle</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">other</span><span class="p">,</span> <span class="kc">True</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">dtype</span><span class="p">))</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_copyto</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="n">hret</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="s1">&#39;copyto does not support type &#39;</span> <span class="o">+</span> <span class="nb">str</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">other</span><span class="p">)))</span></div>
<div class="viewcode-block" id="NDArray.copy"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.copy">[docs]</a> <span class="k">def</span> <span class="nf">copy</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Makes a copy of this ``NDArray``, keeping the same context.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray, CSRNDArray or RowSparseNDArray</span>
<span class="sd"> The copied array</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = x.copy()</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">copyto</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">ctx</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.slice_assign_scalar"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.slice_assign_scalar">[docs]</a> <span class="k">def</span> <span class="nf">slice_assign_scalar</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">value</span><span class="p">,</span> <span class="n">begin</span><span class="p">,</span> <span class="n">end</span><span class="p">,</span> <span class="n">step</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Assign the scalar to a cropped subset of this NDArray. Value will broadcast to the shape of the cropped shape</span>
<span class="sd"> and will be cast to the same dtype of the NDArray.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> value: numeric value</span>
<span class="sd"> Value and this NDArray should be of the same data type.</span>
<span class="sd"> The shape of rhs should be the same as the cropped shape of this NDArray.</span>
<span class="sd"> begin: tuple of begin indices</span>
<span class="sd"> end: tuple of end indices</span>
<span class="sd"> step: tuple of step lenghths</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> This NDArray.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; from mxnet import nd</span>
<span class="sd"> &gt;&gt;&gt; x = nd.ones((2, 2, 2))</span>
<span class="sd"> &gt;&gt;&gt; y = x.slice_assign_scalar(0, (0, 0, None), (1, 1, None), (None, None, None))</span>
<span class="sd"> &gt;&gt;&gt; y</span>
<span class="sd"> [[[0. 0.]</span>
<span class="sd"> [1. 1.]]</span>
<span class="sd"> [[1. 1.]</span>
<span class="sd"> [1. 1.]]]</span>
<span class="sd"> &lt;NDArray 2x2x2 @cpu(0)&gt;</span>
<span class="sd"> &gt;&gt;&gt; x</span>
<span class="sd"> [[[0. 0.]</span>
<span class="sd"> [1. 1.]]</span>
<span class="sd"> [[1. 1.]</span>
<span class="sd"> [1. 1.]]]</span>
<span class="sd"> &lt;NDArray 2x2x2 @cpu(0)&gt;</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_slice_assign_scalar</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">value</span><span class="p">,</span> <span class="n">begin</span><span class="o">=</span><span class="n">begin</span><span class="p">,</span> <span class="n">end</span><span class="o">=</span><span class="n">end</span><span class="p">,</span> <span class="n">step</span><span class="o">=</span><span class="n">step</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.slice_assign"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.slice_assign">[docs]</a> <span class="k">def</span> <span class="nf">slice_assign</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">rhs</span><span class="p">,</span> <span class="n">begin</span><span class="p">,</span> <span class="n">end</span><span class="p">,</span> <span class="n">step</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Assign the rhs to a cropped subset of this NDarray in place.</span>
<span class="sd"> Returns the view of this NDArray.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> rhs: NDArray.</span>
<span class="sd"> rhs and this NDArray should be of the same data type, and on the same device.</span>
<span class="sd"> The shape of rhs should be the same as the cropped shape of this NDArray.</span>
<span class="sd"> begin: tuple of begin indices</span>
<span class="sd"> end: tuple of end indices</span>
<span class="sd"> step: tuple of step lenghths</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> This NDArray.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = nd.ones((2, 2, 2))</span>
<span class="sd"> &gt;&gt;&gt; assigned = nd.zeros((1, 1, 2))</span>
<span class="sd"> &gt;&gt;&gt; y = x.slice_assign(assigned, (0, 0, None), (1, 1, None), (None, None, None))</span>
<span class="sd"> &gt;&gt;&gt; y</span>
<span class="sd"> [[[0. 0.]</span>
<span class="sd"> [1. 1.]]</span>
<span class="sd"> [[1. 1.]</span>
<span class="sd"> [1. 1.]]]</span>
<span class="sd"> &lt;NDArray 2x2x2 @cpu(0)&gt;</span>
<span class="sd"> &gt;&gt;&gt; x</span>
<span class="sd"> [[[0. 0.]</span>
<span class="sd"> [1. 1.]]</span>
<span class="sd"> [[1. 1.]</span>
<span class="sd"> [1. 1.]]]</span>
<span class="sd"> &lt;NDArray 2x2x2 @cpu(0)&gt;</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_slice_assign</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">rhs</span><span class="p">,</span> <span class="n">begin</span><span class="o">=</span><span class="n">begin</span><span class="p">,</span> <span class="n">end</span><span class="o">=</span><span class="n">end</span><span class="p">,</span> <span class="n">step</span><span class="o">=</span><span class="n">step</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.as_in_context"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.as_in_context">[docs]</a> <span class="k">def</span> <span class="nf">as_in_context</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">context</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns an array on the target device with the same value as this array.</span>
<span class="sd"> If the target context is the same as ``self.context``, then ``self`` is</span>
<span class="sd"> returned. Otherwise, a copy is made.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> context : Context</span>
<span class="sd"> The target context.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray, CSRNDArray or RowSparseNDArray</span>
<span class="sd"> The target array.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = x.as_in_context(mx.cpu())</span>
<span class="sd"> &gt;&gt;&gt; y is x</span>
<span class="sd"> True</span>
<span class="sd"> &gt;&gt;&gt; z = x.as_in_context(mx.gpu(0))</span>
<span class="sd"> &gt;&gt;&gt; z is x</span>
<span class="sd"> False</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">context</span> <span class="o">==</span> <span class="n">context</span><span class="p">:</span>
<span class="k">return</span> <span class="bp">self</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">copyto</span><span class="p">(</span><span class="n">context</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.attach_grad"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.attach_grad">[docs]</a> <span class="k">def</span> <span class="nf">attach_grad</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">grad_req</span><span class="o">=</span><span class="s1">&#39;write&#39;</span><span class="p">,</span> <span class="n">stype</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Attach a gradient buffer to this NDArray, so that `backward`</span>
<span class="sd"> can compute gradient with respect to it.</span>
<span class="sd"> The gradient is initialized to zeros.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> grad_req : {&#39;write&#39;, &#39;add&#39;, &#39;null&#39;}</span>
<span class="sd"> How gradient will be accumulated.</span>
<span class="sd"> - &#39;write&#39;: gradient will be overwritten on every backward.</span>
<span class="sd"> - &#39;add&#39;: gradient will be added to existing value on every backward.</span>
<span class="sd"> - &#39;null&#39;: do not compute gradient for this NDArray.</span>
<span class="sd"> stype : str, optional</span>
<span class="sd"> The storage type of the gradient array. Defaults to the same stype of this NDArray.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="kn">from</span> <span class="nn">.</span> <span class="kn">import</span> <span class="n">zeros</span> <span class="k">as</span> <span class="n">_zeros</span>
<span class="k">if</span> <span class="n">stype</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">grad</span> <span class="o">=</span> <span class="n">_zeros</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">stype</span><span class="o">=</span><span class="n">stype</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">grad</span> <span class="o">=</span> <span class="n">op</span><span class="o">.</span><span class="n">zeros_like</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span> <span class="c1"># pylint: disable=undefined-variable</span>
<span class="n">grad_req</span> <span class="o">=</span> <span class="n">_GRAD_REQ_MAP</span><span class="p">[</span><span class="n">grad_req</span><span class="p">]</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXAutogradMarkVariables</span><span class="p">(</span>
<span class="mi">1</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">pointer</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">pointer</span><span class="p">(</span><span class="n">mx_uint</span><span class="p">(</span><span class="n">grad_req</span><span class="p">)),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">pointer</span><span class="p">(</span><span class="n">grad</span><span class="o">.</span><span class="n">handle</span><span class="p">)))</span></div>
<div class="viewcode-block" id="NDArray.drop_grad"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.drop_grad">[docs]</a> <span class="k">def</span> <span class="nf">drop_grad</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Free the memory of the marked ndarray.&quot;&quot;&quot;</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXAutogradDropGrads</span><span class="p">(</span>
<span class="mi">1</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">pointer</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">)))</span></div>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">grad</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns gradient buffer attached to this NDArray.&quot;&quot;&quot;</span>
<span class="kn">from</span> <span class="nn">.</span> <span class="kn">import</span> <span class="n">_ndarray_cls</span>
<span class="n">hdl</span> <span class="o">=</span> <span class="n">NDArrayHandle</span><span class="p">()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayGetGrad</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">hdl</span><span class="p">)))</span>
<span class="k">if</span> <span class="n">hdl</span><span class="o">.</span><span class="n">value</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">return</span> <span class="kc">None</span>
<span class="k">return</span> <span class="n">_ndarray_cls</span><span class="p">(</span><span class="n">hdl</span><span class="p">)</span>
<div class="viewcode-block" id="NDArray.detach"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.detach">[docs]</a> <span class="k">def</span> <span class="nf">detach</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a new NDArray, detached from the current graph.&quot;&quot;&quot;</span>
<span class="kn">from</span> <span class="nn">.</span> <span class="kn">import</span> <span class="n">_ndarray_cls</span>
<span class="n">hdl</span> <span class="o">=</span> <span class="n">NDArrayHandle</span><span class="p">()</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXNDArrayDetach</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">handle</span><span class="p">,</span> <span class="n">ctypes</span><span class="o">.</span><span class="n">byref</span><span class="p">(</span><span class="n">hdl</span><span class="p">)))</span>
<span class="k">return</span> <span class="n">_ndarray_cls</span><span class="p">(</span><span class="n">hdl</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.backward"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.backward">[docs]</a> <span class="k">def</span> <span class="nf">backward</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">out_grad</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">retain_graph</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">train_mode</span><span class="o">=</span><span class="kc">True</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Compute the gradients of this NDArray w.r.t variables.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> out_grad : NDArray, optional</span>
<span class="sd"> Gradient with respect to head.</span>
<span class="sd"> retain_graph : bool, optional</span>
<span class="sd"> Whether to retain the computaion graph for another backward</span>
<span class="sd"> pass on the same graph. By default the computaion history</span>
<span class="sd"> is cleared.</span>
<span class="sd"> train_mode : bool, optional</span>
<span class="sd"> Whether to compute gradient for training or inference.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="n">out_grad</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">ograd_handles</span> <span class="o">=</span> <span class="p">[</span><span class="n">NDArrayHandle</span><span class="p">(</span><span class="mi">0</span><span class="p">)]</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">ograd_handles</span> <span class="o">=</span> <span class="p">[</span><span class="n">out_grad</span><span class="o">.</span><span class="n">handle</span><span class="p">]</span>
<span class="n">check_call</span><span class="p">(</span><span class="n">_LIB</span><span class="o">.</span><span class="n">MXAutogradBackwardEx</span><span class="p">(</span>
<span class="mi">1</span><span class="p">,</span> <span class="n">c_handle_array</span><span class="p">([</span><span class="bp">self</span><span class="p">]),</span>
<span class="n">c_array</span><span class="p">(</span><span class="n">NDArrayHandle</span><span class="p">,</span> <span class="n">ograd_handles</span><span class="p">),</span>
<span class="mi">0</span><span class="p">,</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_void_p</span><span class="p">(</span><span class="mi">0</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="n">retain_graph</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="mi">0</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_int</span><span class="p">(</span><span class="n">train_mode</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_void_p</span><span class="p">(</span><span class="mi">0</span><span class="p">),</span>
<span class="n">ctypes</span><span class="o">.</span><span class="n">c_void_p</span><span class="p">(</span><span class="mi">0</span><span class="p">)))</span></div>
<div class="viewcode-block" id="NDArray.tostype"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.tostype">[docs]</a> <span class="k">def</span> <span class="nf">tostype</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">stype</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Return a copy of the array with chosen storage type.</span>
<span class="sd"> See Also</span>
<span class="sd"> ----------</span>
<span class="sd"> :meth:`mxnet.ndarray.cast_storage`.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray, CSRNDArray or RowSparseNDArray</span>
<span class="sd"> A copy of the array with the chosen storage stype</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="n">stype</span> <span class="o">==</span> <span class="s1">&#39;csr&#39;</span> <span class="ow">and</span> <span class="nb">len</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span> <span class="o">!=</span> <span class="mi">2</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;To convert to a CSR, the NDArray should be 2 Dimensional. Current &quot;</span>
<span class="sa">f</span><span class="s2">&quot;shape is </span><span class="si">{</span><span class="nb">str</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span><span class="si">}</span><span class="s2">&quot;</span><span class="p">)</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">cast_storage</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">stype</span><span class="o">=</span><span class="n">stype</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.to_dlpack_for_read"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.to_dlpack_for_read">[docs]</a> <span class="k">def</span> <span class="nf">to_dlpack_for_read</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a reference view of NDArray that represents as DLManagedTensor until</span>
<span class="sd"> all previous write operations on the current array are finished.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> PyCapsule (the pointer of DLManagedTensor)</span>
<span class="sd"> a reference view of NDArray that represents as DLManagedTensor.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.to_dlpack_for_read(x)</span>
<span class="sd"> &gt;&gt;&gt; type(y)</span>
<span class="sd"> &lt;class &#39;PyCapsule&#39;&gt;</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.from_dlpack(y)</span>
<span class="sd"> &gt;&gt;&gt; z</span>
<span class="sd"> [[1. 1. 1.]</span>
<span class="sd"> [1. 1. 1.]]</span>
<span class="sd"> &lt;NDArray 2x3 @cpu(0)&gt;</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">to_dlpack_for_read</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span></div>
<div class="viewcode-block" id="NDArray.to_dlpack_for_write"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.NDArray.to_dlpack_for_write">[docs]</a> <span class="k">def</span> <span class="nf">to_dlpack_for_write</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a reference view of NDArray that represents as DLManagedTensor until</span>
<span class="sd"> all previous read/write operations on the current array are finished.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> PyCapsule (the pointer of DLManagedTensor)</span>
<span class="sd"> a reference view of NDArray that represents as DLManagedTensor.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; w = mx.nd.to_dlpack_for_write(x)</span>
<span class="sd"> &gt;&gt;&gt; type(w)</span>
<span class="sd"> &lt;class &#39;PyCapsule&#39;&gt;</span>
<span class="sd"> &gt;&gt;&gt; u = mx.nd.from_dlpack(w)</span>
<span class="sd"> &gt;&gt;&gt; u += 1</span>
<span class="sd"> &gt;&gt;&gt; x</span>
<span class="sd"> [[2. 2. 2.]</span>
<span class="sd"> [2. 2. 2.]]</span>
<span class="sd"> &lt;NDArray 2x3 @cpu(0)&gt;</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">to_dlpack_for_write</span><span class="p">(</span><span class="bp">self</span><span class="p">)</span></div>
<span class="k">def</span> <span class="nf">_full</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">value</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> This is added as an NDArray class method in order to support polymorphism in NDArray and numpy.ndarray indexing</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_full</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">value</span><span class="o">=</span><span class="n">value</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">ctx</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">dtype</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_scatter_set_nd</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">value_nd</span><span class="p">,</span> <span class="n">indices</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> This is added as an NDArray class method in order to support polymorphism in NDArray and numpy.ndarray indexing</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_scatter_set_nd</span><span class="p">(</span>
<span class="n">lhs</span><span class="o">=</span><span class="bp">self</span><span class="p">,</span> <span class="n">rhs</span><span class="o">=</span><span class="n">value_nd</span><span class="p">,</span> <span class="n">indices</span><span class="o">=</span><span class="n">indices</span><span class="p">,</span> <span class="n">shape</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="bp">self</span>
<span class="p">)</span></div>
<span class="k">def</span> <span class="nf">check_boolean_array_dimension</span><span class="p">(</span><span class="n">array_shape</span><span class="p">,</span> <span class="n">axis</span><span class="p">,</span> <span class="n">bool_shape</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Advanced boolean indexing is implemented through the use of `nonzero`.</span>
<span class="sd"> Size check is necessary to make sure that the boolean array</span>
<span class="sd"> has exactly as many dimensions as it is supposed to work with before the conversion</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">for</span> <span class="n">i</span><span class="p">,</span> <span class="n">val</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">bool_shape</span><span class="p">):</span>
<span class="k">if</span> <span class="n">array_shape</span><span class="p">[</span><span class="n">axis</span> <span class="o">+</span> <span class="n">i</span><span class="p">]</span> <span class="o">!=</span> <span class="n">val</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">IndexError</span><span class="p">(</span><span class="s1">&#39;boolean index did not match indexed array along axis </span><span class="si">{}</span><span class="s1">;&#39;</span>
<span class="s1">&#39; size is </span><span class="si">{}</span><span class="s1"> but corresponding boolean size is </span><span class="si">{}</span><span class="s1">&#39;</span>
<span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">axis</span> <span class="o">+</span> <span class="n">i</span><span class="p">,</span> <span class="n">array_shape</span><span class="p">[</span><span class="n">axis</span> <span class="o">+</span> <span class="n">i</span><span class="p">],</span> <span class="n">val</span><span class="p">))</span>
<div class="viewcode-block" id="indexing_key_expand_implicit_axes"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.indexing_key_expand_implicit_axes">[docs]</a><span class="k">def</span> <span class="nf">indexing_key_expand_implicit_axes</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="n">shape</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;</span>
<span class="sd"> Make implicit axes explicit by adding ``slice(None)``</span>
<span class="sd"> and convert boolean array to integer array through `nonzero`.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; shape = (3, 4, 5)</span>
<span class="sd"> &gt;&gt;&gt; indexing_key_expand_implicit_axes(np.s_[2, 1, 1], shape)</span>
<span class="sd"> (2, 1, 1)</span>
<span class="sd"> &gt;&gt;&gt; indexing_key_expand_implicit_axes(np.s_[0], shape)</span>
<span class="sd"> (0, slice(None, None, None), slice(None, None, None))</span>
<span class="sd"> &gt;&gt;&gt; indexing_key_expand_implicit_axes(np.s_[0, ...], shape) # equivalent</span>
<span class="sd"> (0, slice(None, None, None), slice(None, None, None))</span>
<span class="sd"> &gt;&gt;&gt; indexing_key_expand_implicit_axes(np.s_[:2, None, 0, ...], shape)</span>
<span class="sd"> (slice(None, 2, None), None, 0, slice(None, None, None))</span>
<span class="sd"> &gt;&gt;&gt; bool_array = np.array([[True, False, True, False],</span>
<span class="sd"> [False, True, False, True],</span>
<span class="sd"> [True, False, True, False]], dtype=np.bool)</span>
<span class="sd"> &gt;&gt;&gt; indexing_key_expand_implicit_axes(np.s_[bool_array, None, 0:2], shape)</span>
<span class="sd"> (array([0, 0, 1, 1, 2, 2], dtype=int64), array([0, 2, 1, 3, 0, 2], dtype=int64), None, slice(None, 2, None))</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">):</span>
<span class="n">key</span> <span class="o">=</span> <span class="p">(</span><span class="n">key</span><span class="p">,)</span>
<span class="c1"># We need to loop explicitly since tuple functions like `index()` or</span>
<span class="c1"># `count()` use `==` internally, which doesn&#39;t play well with fancy</span>
<span class="c1"># indexing.</span>
<span class="n">ell_idx</span> <span class="o">=</span> <span class="kc">None</span>
<span class="n">num_none</span> <span class="o">=</span> <span class="mi">0</span>
<span class="n">nonell_key</span> <span class="o">=</span> <span class="p">[]</span>
<span class="c1"># For 0-d boolean indices: A new axis is added,</span>
<span class="c1"># but at the same time no axis is &quot;used&quot;. So if we have True,</span>
<span class="c1"># we add a new axis (a bit like with np.newaxis). If it is</span>
<span class="c1"># False, we add a new axis, but this axis has 0 entries.</span>
<span class="c1"># prepend is defined to handle this case.</span>
<span class="c1"># prepend = _NDARRAY_NO_ZERO_DIM_BOOL_ARRAY/-1 means there is no 0-d boolean scalar</span>
<span class="c1"># prepend = _NDARRAY_ZERO_DIM_BOOL_ARRAY_FALSE/0 means an zero dim must be expanded</span>
<span class="c1"># prepend = _NDARRAY_ZERO_DIM_BOOL_ARRAY_TRUE/1 means a new axis must be expanded</span>
<span class="n">prepend</span> <span class="o">=</span> <span class="n">_NDARRAY_NO_ZERO_DIM_BOOL_ARRAY</span>
<span class="n">axis</span> <span class="o">=</span> <span class="mi">0</span>
<span class="k">for</span> <span class="n">i</span><span class="p">,</span> <span class="n">idx</span> <span class="ow">in</span> <span class="nb">enumerate</span><span class="p">(</span><span class="n">key</span><span class="p">):</span>
<span class="k">if</span> <span class="n">idx</span> <span class="ow">is</span> <span class="bp">Ellipsis</span><span class="p">:</span>
<span class="k">if</span> <span class="n">ell_idx</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">IndexError</span><span class="p">(</span>
<span class="s1">&#39;Cannot use more than one ellipsis (`...`) for indexing&#39;</span>
<span class="p">)</span>
<span class="n">ell_idx</span> <span class="o">=</span> <span class="n">i</span>
<span class="k">else</span><span class="p">:</span>
<span class="c1"># convert primitive type boolean value to mx.np.bool type</span>
<span class="c1"># otherwise will be treated as 1/0</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="nb">bool</span><span class="p">):</span>
<span class="n">idx</span> <span class="o">=</span> <span class="n">array</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">np</span><span class="o">.</span><span class="n">bool_</span><span class="p">)</span>
<span class="k">if</span> <span class="n">idx</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">num_none</span> <span class="o">+=</span> <span class="mi">1</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">NDArrayBase</span><span class="p">)</span> <span class="ow">and</span> <span class="n">idx</span><span class="o">.</span><span class="n">ndim</span> <span class="o">==</span> <span class="mi">0</span> <span class="ow">and</span> <span class="n">idx</span><span class="o">.</span><span class="n">dtype</span> <span class="o">==</span> <span class="n">np</span><span class="o">.</span><span class="n">bool_</span><span class="p">:</span>
<span class="k">if</span> <span class="ow">not</span> <span class="n">idx</span><span class="p">:</span> <span class="c1"># array(False) has priority</span>
<span class="n">prepend</span> <span class="o">=</span> <span class="n">_NDARRAY_ZERO_DIM_BOOL_ARRAY_FALSE</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">prepend</span> <span class="o">=</span> <span class="n">_NDARRAY_ZERO_DIM_BOOL_ARRAY_TRUE</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">NDArrayBase</span><span class="p">)</span> <span class="ow">and</span> <span class="n">idx</span><span class="o">.</span><span class="n">ndim</span> <span class="o">==</span> <span class="mi">0</span> <span class="ow">and</span> <span class="n">idx</span><span class="o">.</span><span class="n">dtype</span> <span class="o">!=</span> <span class="n">np</span><span class="o">.</span><span class="n">bool_</span><span class="p">:</span>
<span class="c1"># This handles ndarray of zero dim. e.g array(1)</span>
<span class="c1"># while advoid converting zero dim boolean array</span>
<span class="c1"># float type will be converted to int</span>
<span class="n">nonell_key</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="nb">int</span><span class="p">(</span><span class="n">idx</span><span class="o">.</span><span class="n">item</span><span class="p">()))</span>
<span class="n">axis</span> <span class="o">+=</span> <span class="mi">1</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">NDArrayBase</span><span class="p">)</span> <span class="ow">and</span> <span class="n">idx</span><span class="o">.</span><span class="n">dtype</span> <span class="o">==</span> <span class="n">np</span><span class="o">.</span><span class="n">bool_</span><span class="p">:</span>
<span class="c1"># Necessary size check before using `nonzero`</span>
<span class="n">check_boolean_array_dimension</span><span class="p">(</span><span class="n">shape</span><span class="p">,</span> <span class="n">axis</span><span class="p">,</span> <span class="n">idx</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span>
<span class="c1"># If the whole array is false and npx.set_np() is not set_up</span>
<span class="c1"># the program will throw infer shape error</span>
<span class="k">if</span> <span class="ow">not</span> <span class="n">is_np_array</span><span class="p">():</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;Cannot perform boolean indexing in legacy mode. Please activate&#39;</span>
<span class="s1">&#39; numpy semantics by calling `npx.set_np()` in the global scope&#39;</span>
<span class="s1">&#39; before calling this function.&#39;</span><span class="p">)</span>
<span class="c1"># Add the arrays from the nonzero result to the index</span>
<span class="n">nonell_key</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span><span class="n">idx</span><span class="o">.</span><span class="n">nonzero</span><span class="p">())</span>
<span class="n">axis</span> <span class="o">+=</span> <span class="n">idx</span><span class="o">.</span><span class="n">ndim</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">nonell_key</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">idx</span><span class="p">)</span>
<span class="n">axis</span> <span class="o">+=</span> <span class="mi">1</span>
<span class="n">nonell_key</span> <span class="o">=</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">nonell_key</span><span class="p">)</span>
<span class="k">if</span> <span class="n">ell_idx</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="c1"># This handles the case of &quot;too few&quot; indices, e.g., `nd.zeros((2, 3))[0]`,</span>
<span class="c1"># where the ellipsis is implicitly after the last entry.</span>
<span class="n">ell_idx</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">nonell_key</span><span class="p">)</span>
<span class="n">ell_ndim</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">shape</span><span class="p">)</span> <span class="o">+</span> <span class="n">num_none</span> <span class="o">-</span> <span class="nb">len</span><span class="p">(</span><span class="n">nonell_key</span><span class="p">)</span>
<span class="n">expanded_key</span> <span class="o">=</span> <span class="p">(</span><span class="n">nonell_key</span><span class="p">[:</span><span class="n">ell_idx</span><span class="p">]</span> <span class="o">+</span>
<span class="p">(</span><span class="nb">slice</span><span class="p">(</span><span class="kc">None</span><span class="p">),)</span> <span class="o">*</span> <span class="n">ell_ndim</span> <span class="o">+</span>
<span class="n">nonell_key</span><span class="p">[</span><span class="n">ell_idx</span><span class="p">:])</span>
<span class="k">return</span> <span class="n">expanded_key</span><span class="p">,</span> <span class="n">prepend</span></div>
<span class="k">def</span> <span class="nf">_int_to_slice</span><span class="p">(</span><span class="n">idx</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Return a slice that indexes the same entries as a single int.&quot;&quot;&quot;</span>
<span class="k">if</span> <span class="n">idx</span> <span class="o">==</span> <span class="o">-</span><span class="mi">1</span><span class="p">:</span>
<span class="c1"># Avoid slice(-1, 0)</span>
<span class="k">return</span> <span class="nb">slice</span><span class="p">(</span><span class="o">-</span><span class="mi">1</span><span class="p">,</span> <span class="kc">None</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="nb">slice</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">idx</span> <span class="o">+</span> <span class="mi">1</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_shape_for_bcast</span><span class="p">(</span><span class="n">shape</span><span class="p">,</span> <span class="n">target_ndim</span><span class="p">,</span> <span class="n">new_axes</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Return shape with added axes for broadcasting in ``target_ndim`` dimensions.</span>
<span class="sd"> If ``shape`` is shorter than ``target_ndim``, fixed ``1`` entries are inserted</span>
<span class="sd"> into the returned shape, in locations indexed by ``new_axes``. The rest is</span>
<span class="sd"> filled from the back with ``shape`` while possible.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">new_shape</span> <span class="o">=</span> <span class="p">[</span><span class="kc">None</span><span class="p">]</span> <span class="o">*</span> <span class="n">target_ndim</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">shape</span><span class="p">)</span> <span class="o">&lt;</span> <span class="n">target_ndim</span><span class="p">:</span>
<span class="k">for</span> <span class="n">new_ax</span> <span class="ow">in</span> <span class="n">new_axes</span><span class="p">:</span>
<span class="n">new_shape</span><span class="p">[</span><span class="n">new_ax</span><span class="p">]</span> <span class="o">=</span> <span class="mi">1</span>
<span class="c1"># Replace `None` from the right with `shape` entries from the right as</span>
<span class="c1"># long as possible, thereafter with 1.</span>
<span class="n">ax_s</span> <span class="o">=</span> <span class="mi">1</span>
<span class="k">for</span> <span class="n">ax</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="mi">1</span><span class="p">,</span> <span class="n">target_ndim</span> <span class="o">+</span> <span class="mi">1</span><span class="p">):</span>
<span class="k">if</span> <span class="n">new_shape</span><span class="p">[</span><span class="o">-</span><span class="n">ax</span><span class="p">]</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">try</span><span class="p">:</span>
<span class="n">new_shape</span><span class="p">[</span><span class="o">-</span><span class="n">ax</span><span class="p">]</span> <span class="o">=</span> <span class="n">shape</span><span class="p">[</span><span class="o">-</span><span class="n">ax_s</span><span class="p">]</span>
<span class="n">ax_s</span> <span class="o">+=</span> <span class="mi">1</span>
<span class="k">except</span> <span class="ne">IndexError</span><span class="p">:</span>
<span class="n">new_shape</span><span class="p">[</span><span class="o">-</span><span class="n">ax</span><span class="p">]</span> <span class="o">=</span> <span class="mi">1</span>
<span class="k">return</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">new_shape</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_is_advanced_index</span><span class="p">(</span><span class="n">idx</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Return whether ``idx`` is an advanced index (array-like or integer).</span>
<span class="sd"> Note that in contrast to basic indexing, integers are considered advanced</span>
<span class="sd"> indices in the context of advanced indexing as they participate in</span>
<span class="sd"> broadcasting.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="p">(</span><span class="n">NDArray</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">ndarray</span><span class="p">,</span> <span class="n">integer_types</span><span class="p">,</span> <span class="nb">list</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">)):</span>
<span class="k">return</span> <span class="kc">True</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="n">py_slice</span><span class="p">)</span> <span class="ow">or</span> <span class="n">idx</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">return</span> <span class="kc">False</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="nb">range</span><span class="p">):</span>
<span class="k">return</span> <span class="kc">True</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">RuntimeError</span><span class="p">(</span><span class="s1">&#39;illegal index type </span><span class="si">{}</span><span class="s1">&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">idx</span><span class="p">)))</span>
<div class="viewcode-block" id="get_indexing_dispatch_code"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.get_indexing_dispatch_code">[docs]</a><span class="k">def</span> <span class="nf">get_indexing_dispatch_code</span><span class="p">(</span><span class="n">key</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a dispatch code for calling basic or advanced indexing functions.&quot;&quot;&quot;</span>
<span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">key</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">)</span>
<span class="k">for</span> <span class="n">idx</span> <span class="ow">in</span> <span class="n">key</span><span class="p">:</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="p">(</span><span class="n">NDArray</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">ndarray</span><span class="p">,</span> <span class="nb">list</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">,</span> <span class="nb">range</span><span class="p">)):</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">)</span> <span class="ow">and</span> <span class="nb">len</span><span class="p">(</span><span class="n">idx</span><span class="p">)</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">return</span> <span class="n">_NDARRAY_EMPTY_TUPLE_INDEXING</span>
<span class="k">return</span> <span class="n">_NDARRAY_ADVANCED_INDEXING</span>
<span class="k">elif</span> <span class="ow">not</span> <span class="p">(</span><span class="nb">isinstance</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="p">(</span><span class="n">py_slice</span><span class="p">,</span> <span class="n">integer_types</span><span class="p">))</span> <span class="ow">or</span> <span class="n">idx</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">):</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span>
<span class="s1">&#39;NDArray does not support slicing with key </span><span class="si">{}</span><span class="s1"> of type </span><span class="si">{}</span><span class="s1">.&#39;</span>
<span class="s1">&#39;&#39;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">idx</span><span class="p">,</span> <span class="nb">type</span><span class="p">(</span><span class="n">idx</span><span class="p">))</span>
<span class="p">)</span>
<span class="k">return</span> <span class="n">_NDARRAY_BASIC_INDEXING</span></div>
<span class="k">def</span> <span class="nf">_get_index_range</span><span class="p">(</span><span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="p">,</span> <span class="n">length</span><span class="p">,</span> <span class="n">step</span><span class="o">=</span><span class="mi">1</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Given start, stop, step and array length, return</span>
<span class="sd"> absolute values of start, stop, and step for generating index range.</span>
<span class="sd"> The returned values have been compensated by adding length if they</span>
<span class="sd"> are less than zero for all the cases but slice(None, None, -1).</span>
<span class="sd"> Note that the returned value of stop is not necessarily &gt;= 0, since</span>
<span class="sd"> absolute stop is -1 in the case of slice(None, None, -1).&quot;&quot;&quot;</span>
<span class="k">if</span> <span class="n">step</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;step size cannot be zero&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="n">length</span> <span class="o">&lt;</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;array length cannot be less than zero&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="n">step</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">step</span> <span class="o">=</span> <span class="mi">1</span>
<span class="k">if</span> <span class="n">start</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">if</span> <span class="n">step</span> <span class="o">&gt;</span> <span class="mi">0</span><span class="p">:</span>
<span class="n">start</span> <span class="o">=</span> <span class="mi">0</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">start</span> <span class="o">=</span> <span class="n">length</span> <span class="o">-</span> <span class="mi">1</span>
<span class="k">elif</span> <span class="n">start</span> <span class="o">&lt;</span> <span class="mi">0</span><span class="p">:</span>
<span class="n">start</span> <span class="o">+=</span> <span class="n">length</span>
<span class="k">if</span> <span class="n">start</span> <span class="o">&lt;</span> <span class="mi">0</span><span class="p">:</span>
<span class="n">start</span> <span class="o">=</span> <span class="mi">0</span>
<span class="k">elif</span> <span class="n">start</span> <span class="o">&gt;=</span> <span class="n">length</span><span class="p">:</span>
<span class="n">start</span> <span class="o">=</span> <span class="n">length</span>
<span class="k">if</span> <span class="n">stop</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">if</span> <span class="n">step</span> <span class="o">&gt;</span> <span class="mi">0</span><span class="p">:</span>
<span class="n">stop</span> <span class="o">=</span> <span class="n">length</span>
<span class="k">else</span><span class="p">:</span>
<span class="c1"># this supports case such as ::-1</span>
<span class="c1"># stop = -1 here refers to the element before index 0,</span>
<span class="c1"># instead of the last element in the array</span>
<span class="n">stop</span> <span class="o">=</span> <span class="o">-</span><span class="mi">1</span>
<span class="k">elif</span> <span class="n">stop</span> <span class="o">&lt;</span> <span class="mi">0</span><span class="p">:</span>
<span class="n">stop</span> <span class="o">+=</span> <span class="n">length</span>
<span class="k">if</span> <span class="n">stop</span> <span class="o">&lt;</span> <span class="mi">0</span><span class="p">:</span>
<span class="n">stop</span> <span class="o">=</span> <span class="mi">0</span>
<span class="k">elif</span> <span class="n">stop</span> <span class="o">&gt;</span> <span class="n">length</span><span class="p">:</span>
<span class="n">stop</span> <span class="o">=</span> <span class="n">length</span>
<span class="k">return</span> <span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="p">,</span> <span class="n">step</span>
<div class="viewcode-block" id="get_oshape_of_gather_nd_op"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.get_oshape_of_gather_nd_op">[docs]</a><span class="k">def</span> <span class="nf">get_oshape_of_gather_nd_op</span><span class="p">(</span><span class="n">dshape</span><span class="p">,</span> <span class="n">ishape</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Given data and index shapes, get the output `NDArray` shape.</span>
<span class="sd"> This basically implements the infer shape logic of op gather_nd.&quot;&quot;&quot;</span>
<span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="n">dshape</span><span class="p">)</span> <span class="o">&gt;</span> <span class="mi">0</span> <span class="ow">and</span> <span class="nb">len</span><span class="p">(</span><span class="n">ishape</span><span class="p">)</span> <span class="o">&gt;</span> <span class="mi">0</span>
<span class="n">oshape</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">ishape</span><span class="p">[</span><span class="mi">1</span><span class="p">:])</span>
<span class="k">if</span> <span class="n">ishape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">&lt;</span> <span class="nb">len</span><span class="p">(</span><span class="n">dshape</span><span class="p">):</span>
<span class="n">oshape</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span><span class="n">dshape</span><span class="p">[</span><span class="n">ishape</span><span class="p">[</span><span class="mi">0</span><span class="p">]:])</span>
<span class="k">return</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">oshape</span><span class="p">)</span></div>
<span class="k">def</span> <span class="nf">_get_dim_size</span><span class="p">(</span><span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="p">,</span> <span class="n">step</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Given start, stop, and step, calculate the number of elements</span>
<span class="sd"> of this slice.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">assert</span> <span class="n">step</span> <span class="o">!=</span> <span class="mi">0</span>
<span class="k">if</span> <span class="n">stop</span> <span class="o">==</span> <span class="n">start</span><span class="p">:</span>
<span class="k">return</span> <span class="mi">0</span>
<span class="k">if</span> <span class="n">step</span> <span class="o">&gt;</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">assert</span> <span class="n">start</span> <span class="o">&lt;</span> <span class="n">stop</span>
<span class="n">dim_size</span> <span class="o">=</span> <span class="p">(</span><span class="n">stop</span> <span class="o">-</span> <span class="n">start</span> <span class="o">-</span> <span class="mi">1</span><span class="p">)</span> <span class="o">//</span> <span class="n">step</span> <span class="o">+</span> <span class="mi">1</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">assert</span> <span class="n">stop</span> <span class="o">&lt;</span> <span class="n">start</span>
<span class="n">dim_size</span> <span class="o">=</span> <span class="p">(</span><span class="n">start</span> <span class="o">-</span> <span class="n">stop</span> <span class="o">-</span> <span class="mi">1</span><span class="p">)</span> <span class="o">//</span> <span class="p">(</span><span class="o">-</span><span class="n">step</span><span class="p">)</span> <span class="o">+</span> <span class="mi">1</span>
<span class="k">return</span> <span class="n">dim_size</span>
<span class="k">def</span> <span class="nf">_get_slice_len</span><span class="p">(</span><span class="n">slc</span><span class="p">,</span> <span class="n">seq_length</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Given a python slice object and the length of the sequence, calculate the number of elements</span>
<span class="sd"> in the slice.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> slc : py_slice</span>
<span class="sd"> The slice object</span>
<span class="sd"> seq_length : int</span>
<span class="sd"> The length of the object you are going to apply the slice on</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> ret : int</span>
<span class="sd"> Total number of elements in the slice</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="p">,</span> <span class="n">step</span> <span class="o">=</span> <span class="n">slc</span><span class="o">.</span><span class="n">indices</span><span class="p">(</span><span class="n">seq_length</span><span class="p">)</span>
<span class="k">return</span> <span class="nb">max</span><span class="p">(</span><span class="mi">0</span><span class="p">,</span> <span class="p">(</span><span class="n">stop</span> <span class="o">-</span> <span class="n">start</span> <span class="o">+</span> <span class="p">(</span><span class="n">step</span> <span class="o">-</span> <span class="p">(</span><span class="mi">1</span> <span class="k">if</span> <span class="n">step</span> <span class="o">&gt;</span> <span class="mi">0</span> <span class="k">else</span> <span class="o">-</span><span class="mi">1</span><span class="p">)))</span> <span class="o">//</span> <span class="n">step</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_get_broadcast_shape</span><span class="p">(</span><span class="n">shape1</span><span class="p">,</span> <span class="n">shape2</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Given two shapes that are not identical, find the shape</span>
<span class="sd"> that both input shapes can broadcast to.&quot;&quot;&quot;</span>
<span class="k">if</span> <span class="n">shape1</span> <span class="o">==</span> <span class="n">shape2</span><span class="p">:</span>
<span class="k">return</span> <span class="n">shape1</span>
<span class="n">length1</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">shape1</span><span class="p">)</span>
<span class="n">length2</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">shape2</span><span class="p">)</span>
<span class="k">if</span> <span class="n">length1</span> <span class="o">&gt;</span> <span class="n">length2</span><span class="p">:</span>
<span class="n">shape</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">shape1</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">shape</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">shape2</span><span class="p">)</span>
<span class="n">i</span> <span class="o">=</span> <span class="nb">max</span><span class="p">(</span><span class="n">length1</span><span class="p">,</span> <span class="n">length2</span><span class="p">)</span> <span class="o">-</span> <span class="mi">1</span>
<span class="k">for</span> <span class="n">a</span><span class="p">,</span> <span class="n">b</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">shape1</span><span class="p">[::</span><span class="o">-</span><span class="mi">1</span><span class="p">],</span> <span class="n">shape2</span><span class="p">[::</span><span class="o">-</span><span class="mi">1</span><span class="p">]):</span>
<span class="k">if</span> <span class="n">a</span> <span class="o">!=</span> <span class="mi">1</span> <span class="ow">and</span> <span class="n">b</span> <span class="o">!=</span> <span class="mi">1</span> <span class="ow">and</span> <span class="n">a</span> <span class="o">!=</span> <span class="n">b</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="sa">f</span><span class="s1">&#39;shape1=</span><span class="si">{</span><span class="n">shape1</span><span class="si">}</span><span class="s1"> is not broadcastable to shape2=</span><span class="si">{</span><span class="n">shape2</span><span class="si">}</span><span class="s1">&#39;</span><span class="p">)</span>
<span class="n">shape</span><span class="p">[</span><span class="n">i</span><span class="p">]</span> <span class="o">=</span> <span class="n">b</span> <span class="k">if</span> <span class="n">a</span> <span class="o">==</span> <span class="mi">1</span> <span class="k">else</span> <span class="n">a</span>
<span class="n">i</span> <span class="o">-=</span> <span class="mi">1</span>
<span class="k">return</span> <span class="nb">tuple</span><span class="p">(</span><span class="n">shape</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">_broadcast_shapes</span><span class="p">(</span><span class="n">seq</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Return the broadcast shape of all advanced indices in ``seq``.</span>
<span class="sd"> All entries are assumed to have a ``shape`` property.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">reduce</span><span class="p">(</span><span class="n">_get_broadcast_shape</span><span class="p">,</span> <span class="p">[</span><span class="n">x</span><span class="o">.</span><span class="n">shape</span> <span class="k">for</span> <span class="n">x</span> <span class="ow">in</span> <span class="n">seq</span><span class="p">],</span> <span class="p">())</span>
<div class="viewcode-block" id="onehot_encode"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.onehot_encode">[docs]</a><span class="k">def</span> <span class="nf">onehot_encode</span><span class="p">(</span><span class="n">indices</span><span class="p">,</span> <span class="n">out</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;One-hot encoding indices into matrix out.</span>
<span class="sd"> .. note:: `onehot_encode` is deprecated. Use `one_hot` instead.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_onehot_encode</span><span class="p">(</span><span class="n">indices</span><span class="p">,</span> <span class="n">out</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="n">out</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="ones"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.ones">[docs]</a><span class="k">def</span> <span class="nf">ones</span><span class="p">(</span><span class="n">shape</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a new array filled with all ones, with the given shape and type.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> shape : int or tuple of int or list of int</span>
<span class="sd"> The shape of the empty array.</span>
<span class="sd"> ctx : Context, optional</span>
<span class="sd"> An optional device context.</span>
<span class="sd"> Defaults to the current default context (``mxnet.context.current_context()``).</span>
<span class="sd"> dtype : str or numpy.dtype, optional</span>
<span class="sd"> An optional value type (default is `float32`).</span>
<span class="sd"> out : NDArray, optional</span>
<span class="sd"> The output NDArray (default is `None`).</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> A new array of the specified shape filled with all ones.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.ones(1).asnumpy()</span>
<span class="sd"> array([ 1.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.ones((1,2), mx.gpu(0))</span>
<span class="sd"> &lt;NDArray 1x2 @gpu(0)&gt;</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.ones((1,2), dtype=&#39;float16&#39;).asnumpy()</span>
<span class="sd"> array([[ 1., 1.]], dtype=float16)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= unused-argument</span>
<span class="k">if</span> <span class="n">ctx</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">ctx</span> <span class="o">=</span> <span class="n">current_device</span><span class="p">()</span>
<span class="n">dtype</span> <span class="o">=</span> <span class="n">mx_real_t</span> <span class="k">if</span> <span class="n">dtype</span> <span class="ow">is</span> <span class="kc">None</span> <span class="k">else</span> <span class="n">dtype</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_ones</span><span class="p">(</span><span class="n">shape</span><span class="o">=</span><span class="n">shape</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="n">ctx</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">dtype</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="full"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.full">[docs]</a><span class="k">def</span> <span class="nf">full</span><span class="p">(</span><span class="n">shape</span><span class="p">,</span> <span class="n">val</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">mx_real_t</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a new array of given shape and type, filled with the given value `val`.</span>
<span class="sd"> Parameters</span>
<span class="sd"> --------</span>
<span class="sd"> shape : int or tuple of int</span>
<span class="sd"> The shape of the new array.</span>
<span class="sd"> val : scalar</span>
<span class="sd"> Fill value.</span>
<span class="sd"> ctx : Context, optional</span>
<span class="sd"> Device context (default is the current default context).</span>
<span class="sd"> dtype : `str` or `numpy.dtype`, optional</span>
<span class="sd"> The data type of the returned `NDArray`. The default datatype is `float32`.</span>
<span class="sd"> out : NDArray, optional</span>
<span class="sd"> The output NDArray (default is `None`).</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> `NDArray` filled with `val`, with the given shape, ctx, and dtype.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.full(1, 2.0).asnumpy()</span>
<span class="sd"> array([ 2.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.full((1, 2), 2.0, mx.gpu(0))</span>
<span class="sd"> &lt;NDArray 1x2 @gpu(0)&gt;</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.full((1, 2), 2.0, dtype=&#39;float16&#39;).asnumpy()</span>
<span class="sd"> array([[ 2., 2.]], dtype=float16)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">out</span> <span class="o">=</span> <span class="n">empty</span><span class="p">(</span><span class="n">shape</span><span class="p">,</span> <span class="n">ctx</span><span class="p">,</span> <span class="n">dtype</span><span class="p">)</span> <span class="k">if</span> <span class="n">out</span> <span class="ow">is</span> <span class="kc">None</span> <span class="k">else</span> <span class="n">out</span>
<span class="n">out</span><span class="p">[:]</span> <span class="o">=</span> <span class="n">val</span>
<span class="k">return</span> <span class="n">out</span></div>
<span class="k">def</span> <span class="nf">array</span><span class="p">(</span><span class="n">source_array</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Creates an array from any object exposing the array interface.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> source_array : array_like</span>
<span class="sd"> An object exposing the array interface, an object whose `__array__`</span>
<span class="sd"> method returns an array, or any (nested) sequence.</span>
<span class="sd"> ctx : Context, optional</span>
<span class="sd"> Device context (default is the current default context).</span>
<span class="sd"> dtype : str or numpy.dtype, optional</span>
<span class="sd"> The data type of the output array. The default dtype is ``source_array.dtype``</span>
<span class="sd"> if `source_array` is an `NDArray`, `float32` otherwise.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> An `NDArray` with the same contents as the `source_array`.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">source_array</span><span class="p">,</span> <span class="n">NDArray</span><span class="p">):</span>
<span class="n">dtype</span> <span class="o">=</span> <span class="n">source_array</span><span class="o">.</span><span class="n">dtype</span> <span class="k">if</span> <span class="n">dtype</span> <span class="ow">is</span> <span class="kc">None</span> <span class="k">else</span> <span class="n">dtype</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">dtype</span> <span class="o">=</span> <span class="n">mx_real_t</span> <span class="k">if</span> <span class="n">dtype</span> <span class="ow">is</span> <span class="kc">None</span> <span class="k">else</span> <span class="n">dtype</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">source_array</span><span class="p">,</span> <span class="n">np</span><span class="o">.</span><span class="n">ndarray</span><span class="p">):</span>
<span class="k">try</span><span class="p">:</span>
<span class="n">source_array</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">array</span><span class="p">(</span><span class="n">source_array</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">dtype</span><span class="p">)</span>
<span class="k">except</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="s1">&#39;source_array must be array like object&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="n">source_array</span><span class="o">.</span><span class="n">shape</span> <span class="o">==</span> <span class="p">():</span>
<span class="c1"># In this case we can&#39;t assign, so we need to go through an auxiliary array</span>
<span class="n">arr</span> <span class="o">=</span> <span class="n">empty</span><span class="p">((</span><span class="mi">1</span><span class="p">,),</span> <span class="n">ctx</span><span class="p">,</span> <span class="n">dtype</span><span class="p">)</span>
<span class="n">arr</span><span class="p">[:]</span> <span class="o">=</span> <span class="n">source_array</span>
<span class="k">return</span> <span class="n">arr</span><span class="o">.</span><span class="n">reshape</span><span class="p">(())</span>
<span class="k">elif</span> <span class="n">source_array</span><span class="o">.</span><span class="n">size</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">return</span> <span class="n">empty</span><span class="p">(</span><span class="n">source_array</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">ctx</span><span class="p">,</span> <span class="n">dtype</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">arr</span> <span class="o">=</span> <span class="n">empty</span><span class="p">(</span><span class="n">source_array</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">ctx</span><span class="p">,</span> <span class="n">dtype</span><span class="p">)</span>
<span class="n">arr</span><span class="p">[:]</span> <span class="o">=</span> <span class="n">source_array</span>
<span class="k">return</span> <span class="n">arr</span>
<div class="viewcode-block" id="moveaxis"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.moveaxis">[docs]</a><span class="k">def</span> <span class="nf">moveaxis</span><span class="p">(</span><span class="n">tensor</span><span class="p">,</span> <span class="n">source</span><span class="p">,</span> <span class="n">destination</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Moves the `source` axis into the `destination` position</span>
<span class="sd"> while leaving the other axes in their original order</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> tensor : mx.nd.array</span>
<span class="sd"> The array which axes should be reordered</span>
<span class="sd"> source : int or sequence of int</span>
<span class="sd"> Original position of the axes to move. Can be negative but must be unique.</span>
<span class="sd"> destination : int or sequence of int</span>
<span class="sd"> Destination position for each of the original axes. Can be negative but must be unique.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> result : mx.nd.array</span>
<span class="sd"> Array with moved axes.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; X = mx.nd.array([[1, 2, 3], [4, 5, 6]])</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.moveaxis(X, 0, 1).shape</span>
<span class="sd"> (3L, 2L)</span>
<span class="sd"> &gt;&gt;&gt; X = mx.nd.zeros((3, 4, 5))</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.moveaxis(X, [0, 1], [-1, -2]).shape</span>
<span class="sd"> (5, 4, 3)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">try</span><span class="p">:</span>
<span class="n">source</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">core</span><span class="o">.</span><span class="n">numeric</span><span class="o">.</span><span class="n">normalize_axis_tuple</span><span class="p">(</span><span class="n">source</span><span class="p">,</span> <span class="n">tensor</span><span class="o">.</span><span class="n">ndim</span><span class="p">)</span>
<span class="k">except</span> <span class="ne">IndexError</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;Source should verify 0 &lt;= source &lt; tensor.ndim&#39;</span>
<span class="sa">f</span><span class="s1">&#39;Got </span><span class="si">{</span><span class="n">source</span><span class="si">}</span><span class="s1">&#39;</span><span class="p">)</span>
<span class="k">try</span><span class="p">:</span>
<span class="n">destination</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">core</span><span class="o">.</span><span class="n">numeric</span><span class="o">.</span><span class="n">normalize_axis_tuple</span><span class="p">(</span><span class="n">destination</span><span class="p">,</span> <span class="n">tensor</span><span class="o">.</span><span class="n">ndim</span><span class="p">)</span>
<span class="k">except</span> <span class="ne">IndexError</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="sa">f</span><span class="s1">&#39;Destination should verify 0 &lt;= destination &lt; tensor.ndim (</span><span class="si">{</span><span class="n">tensor</span><span class="o">.</span><span class="n">ndim</span><span class="si">}</span><span class="s1">).&#39;</span><span class="p">,</span>
<span class="sa">f</span><span class="s1">&#39;Got </span><span class="si">{</span><span class="n">destination</span><span class="si">}</span><span class="s1">&#39;</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">source</span><span class="p">)</span> <span class="o">!=</span> <span class="nb">len</span><span class="p">(</span><span class="n">destination</span><span class="p">):</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;`source` and `destination` arguments must have &#39;</span>
<span class="s1">&#39;the same number of elements&#39;</span><span class="p">)</span>
<span class="n">order</span> <span class="o">=</span> <span class="p">[</span><span class="n">n</span> <span class="k">for</span> <span class="n">n</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">tensor</span><span class="o">.</span><span class="n">ndim</span><span class="p">)</span> <span class="k">if</span> <span class="n">n</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">source</span><span class="p">]</span>
<span class="k">for</span> <span class="n">dest</span><span class="p">,</span> <span class="n">src</span> <span class="ow">in</span> <span class="nb">sorted</span><span class="p">(</span><span class="nb">zip</span><span class="p">(</span><span class="n">destination</span><span class="p">,</span> <span class="n">source</span><span class="p">)):</span>
<span class="n">order</span><span class="o">.</span><span class="n">insert</span><span class="p">(</span><span class="n">dest</span><span class="p">,</span> <span class="n">src</span><span class="p">)</span>
<span class="k">return</span> <span class="n">op</span><span class="o">.</span><span class="n">transpose</span><span class="p">(</span><span class="n">tensor</span><span class="p">,</span> <span class="n">order</span><span class="p">)</span></div>
<span class="c1"># pylint: disable= no-member, protected-access, too-many-arguments, redefined-outer-name</span>
<div class="viewcode-block" id="arange"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.arange">[docs]</a><span class="k">def</span> <span class="nf">arange</span><span class="p">(</span><span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">step</span><span class="o">=</span><span class="mf">1.0</span><span class="p">,</span> <span class="n">repeat</span><span class="o">=</span><span class="mi">1</span><span class="p">,</span> <span class="n">infer_range</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">mx_real_t</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns evenly spaced values within a given interval.</span>
<span class="sd"> Values are generated within the half-open interval [`start`, `stop`). In other</span>
<span class="sd"> words, the interval includes `start` but excludes `stop`. The function is</span>
<span class="sd"> similar to the built-in Python function `range` and to `numpy.arange`,</span>
<span class="sd"> but returns an `NDArray`.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> start : number, optional</span>
<span class="sd"> Start of interval. The default start value is 0.</span>
<span class="sd"> stop : number</span>
<span class="sd"> End of interval.</span>
<span class="sd"> step : number, optional</span>
<span class="sd"> Spacing between values. The default step size is 1.</span>
<span class="sd"> repeat : int, optional</span>
<span class="sd"> Number of times to repeat each element. The default repeat count is 1.</span>
<span class="sd"> infer_range : boolean, optional</span>
<span class="sd"> Infer the stop position from the start, step, repeat, and output tensor size.</span>
<span class="sd"> Deprecated. Only False is supported.</span>
<span class="sd"> ctx : Context, optional</span>
<span class="sd"> Device context. Default context is the current default context.</span>
<span class="sd"> dtype : str or numpy.dtype, optional</span>
<span class="sd"> The data type of the `NDArray`. The default datatype is `np.float32`.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> `NDArray` of evenly spaced values in the specified range.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.arange(3).asnumpy()</span>
<span class="sd"> array([ 0., 1., 2.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.arange(2, 6).asnumpy()</span>
<span class="sd"> array([ 2., 3., 4., 5.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.arange(2, 6, step=2).asnumpy()</span>
<span class="sd"> array([ 2., 4.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.arange(2, 6, step=1.5, repeat=2).asnumpy()</span>
<span class="sd"> array([ 2. , 2. , 3.5, 3.5, 5. , 5. ], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.arange(2, 6, step=2, repeat=3, dtype=&#39;int32&#39;).asnumpy()</span>
<span class="sd"> array([2, 2, 2, 4, 4, 4], dtype=int32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="n">infer_range</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s1">&#39;`infer_range` argument has been deprecated&#39;</span><span class="p">,</span>
<span class="ne">DeprecationWarning</span><span class="p">)</span>
<span class="k">if</span> <span class="n">ctx</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">ctx</span> <span class="o">=</span> <span class="n">current_device</span><span class="p">()</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_arange</span><span class="p">(</span><span class="n">start</span><span class="o">=</span><span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="o">=</span><span class="n">stop</span><span class="p">,</span> <span class="n">step</span><span class="o">=</span><span class="n">step</span><span class="p">,</span> <span class="n">repeat</span><span class="o">=</span><span class="n">repeat</span><span class="p">,</span>
<span class="n">infer_range</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">dtype</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="nb">str</span><span class="p">(</span><span class="n">ctx</span><span class="p">))</span></div>
<span class="c1"># pylint: enable= no-member, protected-access, too-many-arguments</span>
<span class="c1"># pylint: disable= no-member, protected-access, too-many-arguments</span>
<div class="viewcode-block" id="linspace"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.linspace">[docs]</a><span class="k">def</span> <span class="nf">linspace</span><span class="p">(</span><span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="p">,</span> <span class="n">num</span><span class="p">,</span> <span class="n">endpoint</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">mx_real_t</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Return evenly spaced numbers within a specified interval.</span>
<span class="sd"> Values are generated within the half-open interval [`start`, `stop`) or</span>
<span class="sd"> closed interval [start, stop] depending on whether `endpoint` is True or</span>
<span class="sd"> False. The function is similar to `numpy.linspace`, but returns an `NDArray`.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> start : number</span>
<span class="sd"> Start of interval.</span>
<span class="sd"> stop : number</span>
<span class="sd"> End of interval, unless endpoint is set to False. In that case,</span>
<span class="sd"> the sequence consists of all but the last of `num + 1` evenly spaced</span>
<span class="sd"> samples, so that stop is excluded. Note that the step size changes</span>
<span class="sd"> when endpoint is False.</span>
<span class="sd"> num : number</span>
<span class="sd"> Number of samples to generate. Must be non-negative.</span>
<span class="sd"> endpoint : bool</span>
<span class="sd"> If True, stop is the last sample. Otherwise, it is not included.</span>
<span class="sd"> The default is True.</span>
<span class="sd"> ctx : Context, optional</span>
<span class="sd"> Device context. Default context is the current default context.</span>
<span class="sd"> dtype : str or numpy.dtype, optional</span>
<span class="sd"> The data type of the `NDArray`. The default datatype is `np.float32`.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> `NDArray` of evenly spaced values in the specified range.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.linspace(2.0, 3.0, 5).asnumpy()</span>
<span class="sd"> array([ 2., 2.25., 2.5, 2.75, 3.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.linspace(2.0, 3.0, 5, endpoint=False).asnumpy()</span>
<span class="sd"> array([ 2., 2.2., 2.4, 2.6, 2.8], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="n">ctx</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">ctx</span> <span class="o">=</span> <span class="n">current_device</span><span class="p">()</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_linspace</span><span class="p">(</span><span class="n">start</span><span class="o">=</span><span class="n">start</span><span class="p">,</span> <span class="n">stop</span><span class="o">=</span><span class="n">stop</span><span class="p">,</span> <span class="n">num</span><span class="o">=</span><span class="n">num</span><span class="p">,</span>
<span class="n">endpoint</span><span class="o">=</span><span class="n">endpoint</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">dtype</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="nb">str</span><span class="p">(</span><span class="n">ctx</span><span class="p">))</span></div>
<span class="c1"># pylint: disable= no-member, protected-access, too-many-arguments</span>
<span class="c1">#pylint: disable= too-many-arguments, no-member, protected-access</span>
<span class="k">def</span> <span class="nf">_ufunc_helper</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">,</span> <span class="n">fn_array</span><span class="p">,</span> <span class="n">fn_scalar</span><span class="p">,</span> <span class="n">lfn_scalar</span><span class="p">,</span> <span class="n">rfn_scalar</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot; Helper function for element-wise operation.</span>
<span class="sd"> The function will perform numpy-like broadcasting if needed and call different functions.</span>
<span class="sd"> Parameters</span>
<span class="sd"> --------</span>
<span class="sd"> lhs : NDArray or numeric value</span>
<span class="sd"> Left-hand side operand.</span>
<span class="sd"> rhs : NDArray or numeric value</span>
<span class="sd"> Right-hand operand,</span>
<span class="sd"> fn_array : function</span>
<span class="sd"> Function to be called if both lhs and rhs are of ``NDArray`` type.</span>
<span class="sd"> fn_scalar : function</span>
<span class="sd"> Function to be called if both lhs and rhs are numeric values.</span>
<span class="sd"> lfn_scalar : function</span>
<span class="sd"> Function to be called if lhs is ``NDArray`` while rhs is numeric value</span>
<span class="sd"> rfn_scalar : function</span>
<span class="sd"> Function to be called if lhs is numeric value while rhs is ``NDArray``;</span>
<span class="sd"> if none is provided, then the function is commutative, so rfn_scalar is equal to lfn_scalar</span>
<span class="sd"> Returns</span>
<span class="sd"> --------</span>
<span class="sd"> NDArray</span>
<span class="sd"> result array</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">):</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">rhs</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">):</span>
<span class="k">return</span> <span class="n">fn_scalar</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">if</span> <span class="n">rfn_scalar</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="c1"># commutative function</span>
<span class="k">return</span> <span class="n">lfn_scalar</span><span class="p">(</span><span class="n">rhs</span><span class="p">,</span> <span class="nb">float</span><span class="p">(</span><span class="n">lhs</span><span class="p">))</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="n">rfn_scalar</span><span class="p">(</span><span class="n">rhs</span><span class="p">,</span> <span class="nb">float</span><span class="p">(</span><span class="n">lhs</span><span class="p">))</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">rhs</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">):</span>
<span class="k">return</span> <span class="n">lfn_scalar</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="nb">float</span><span class="p">(</span><span class="n">rhs</span><span class="p">))</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">rhs</span><span class="p">,</span> <span class="n">NDArray</span><span class="p">):</span>
<span class="k">return</span> <span class="n">fn_array</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">TypeError</span><span class="p">(</span><span class="sa">f</span><span class="s1">&#39;type </span><span class="si">{</span><span class="nb">str</span><span class="p">(</span><span class="nb">type</span><span class="p">(</span><span class="n">rhs</span><span class="p">))</span><span class="si">}</span><span class="s1"> not supported&#39;</span><span class="p">)</span>
<span class="c1">#pylint: enable= too-many-arguments, no-member, protected-access</span>
<div class="viewcode-block" id="add"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.add">[docs]</a><span class="k">def</span> <span class="nf">add</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns element-wise sum of the input arrays with broadcasting.</span>
<span class="sd"> Equivalent to ``lhs + rhs``, ``mx.nd.broadcast_add(lhs, rhs)`` and</span>
<span class="sd"> ``mx.nd.broadcast_plus(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First array to be added.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second array to be added.</span>
<span class="sd"> If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> The element-wise sum of the input arrays.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x+2).asnumpy()</span>
<span class="sd"> array([[ 3., 3., 3.],</span>
<span class="sd"> [ 3., 3., 3.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x+y).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 2., 2., 2.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.add(x,y).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 2., 2., 2.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (z + y).asnumpy()</span>
<span class="sd"> array([[ 0., 1.],</span>
<span class="sd"> [ 1., 2.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_add</span><span class="p">,</span>
<span class="n">operator</span><span class="o">.</span><span class="n">add</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_plus_scalar</span><span class="p">,</span>
<span class="kc">None</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="subtract"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.subtract">[docs]</a><span class="k">def</span> <span class="nf">subtract</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns element-wise difference of the input arrays with broadcasting.</span>
<span class="sd"> Equivalent to ``lhs - rhs``, ``mx.nd.broadcast_sub(lhs, rhs)`` and</span>
<span class="sd"> ``mx.nd.broadcast_minus(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First array to be subtracted.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second array to be subtracted.</span>
<span class="sd"> If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> The element-wise difference of the input arrays.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x-2).asnumpy()</span>
<span class="sd"> array([[-1., -1., -1.],</span>
<span class="sd"> [-1., -1., -1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x-y).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 0., 0., 0.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.subtract(x,y).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 0., 0., 0.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (z-y).asnumpy()</span>
<span class="sd"> array([[ 0., 1.],</span>
<span class="sd"> [-1., 0.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_sub</span><span class="p">,</span>
<span class="n">operator</span><span class="o">.</span><span class="n">sub</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_minus_scalar</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_rminus_scalar</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="multiply"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.multiply">[docs]</a><span class="k">def</span> <span class="nf">multiply</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns element-wise product of the input arrays with broadcasting.</span>
<span class="sd"> Equivalent to ``lhs * rhs`` and ``mx.nd.broadcast_mul(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First array to be multiplied.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second array to be multiplied.</span>
<span class="sd"> If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> The element-wise multiplication of the input arrays.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x*2).asnumpy()</span>
<span class="sd"> array([[ 2., 2., 2.],</span>
<span class="sd"> [ 2., 2., 2.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x*y).asnumpy()</span>
<span class="sd"> array([[ 0., 0., 0.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.multiply(x, y).asnumpy()</span>
<span class="sd"> array([[ 0., 0., 0.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (z*y).asnumpy()</span>
<span class="sd"> array([[ 0., 0.],</span>
<span class="sd"> [ 0., 1.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_mul</span><span class="p">,</span>
<span class="n">operator</span><span class="o">.</span><span class="n">mul</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_mul_scalar</span><span class="p">,</span>
<span class="kc">None</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="divide"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.divide">[docs]</a><span class="k">def</span> <span class="nf">divide</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns element-wise division of the input arrays with broadcasting.</span>
<span class="sd"> Equivalent to ``lhs / rhs`` and ``mx.nd.broadcast_div(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First array in division.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second array in division.</span>
<span class="sd"> The arrays to be divided. If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> The element-wise division of the input arrays.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))*6</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.ones((2,1))*2</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 6., 6., 6.],</span>
<span class="sd"> [ 6., 6., 6.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 2.],</span>
<span class="sd"> [ 2.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x/2</span>
<span class="sd"> &lt;NDArray 2x3 @cpu(0)&gt;</span>
<span class="sd"> &gt;&gt;&gt; (x/3).asnumpy()</span>
<span class="sd"> array([[ 2., 2., 2.],</span>
<span class="sd"> [ 2., 2., 2.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x/y).asnumpy()</span>
<span class="sd"> array([[ 3., 3., 3.],</span>
<span class="sd"> [ 3., 3., 3.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.divide(x,y).asnumpy()</span>
<span class="sd"> array([[ 3., 3., 3.],</span>
<span class="sd"> [ 3., 3., 3.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_div</span><span class="p">,</span>
<span class="n">operator</span><span class="o">.</span><span class="n">truediv</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_div_scalar</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_rdiv_scalar</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="modulo"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.modulo">[docs]</a><span class="k">def</span> <span class="nf">modulo</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns element-wise modulo of the input arrays with broadcasting.</span>
<span class="sd"> Equivalent to ``lhs % rhs`` and ``mx.nd.broadcast_mod(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First array in modulo.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second array in modulo.</span>
<span class="sd"> The arrays to be taken modulo. If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> The element-wise modulo of the input arrays.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))*6</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.ones((2,1))*4</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 6., 6., 6.],</span>
<span class="sd"> [ 6., 6., 6.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 4.],</span>
<span class="sd"> [ 4.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; x%5</span>
<span class="sd"> &lt;NDArray 2x3 @cpu(0)&gt;</span>
<span class="sd"> &gt;&gt;&gt; (x%5).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x%y).asnumpy()</span>
<span class="sd"> array([[ 2., 2., 2.],</span>
<span class="sd"> [ 2., 2., 2.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.modulo(x,y).asnumpy()</span>
<span class="sd"> array([[ 2., 2., 2.],</span>
<span class="sd"> [ 2., 2., 2.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_mod</span><span class="p">,</span>
<span class="n">operator</span><span class="o">.</span><span class="n">mod</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_mod_scalar</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_rmod_scalar</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="power"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.power">[docs]</a><span class="k">def</span> <span class="nf">power</span><span class="p">(</span><span class="n">base</span><span class="p">,</span> <span class="n">exp</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns result of first array elements raised to powers from second array, element-wise</span>
<span class="sd"> with broadcasting.</span>
<span class="sd"> Equivalent to ``base ** exp`` and ``mx.nd.broadcast_power(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> base : scalar or NDArray</span>
<span class="sd"> The base array</span>
<span class="sd"> exp : scalar or NDArray</span>
<span class="sd"> The exponent array. If ``base.shape != exp.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> --------</span>
<span class="sd"> NDArray</span>
<span class="sd"> The bases in x raised to the exponents in y.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))*2</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(1,3).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(1,3).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 2., 2., 2.],</span>
<span class="sd"> [ 2., 2., 2.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 1.],</span>
<span class="sd"> [ 2.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 1.],</span>
<span class="sd"> [ 2.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x**2).asnumpy()</span>
<span class="sd"> array([[ 4., 4., 4.],</span>
<span class="sd"> [ 4., 4., 4.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x**y).asnumpy()</span>
<span class="sd"> array([[ 2., 2., 2.],</span>
<span class="sd"> [ 4., 4., 4.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.power(x,y).asnumpy()</span>
<span class="sd"> array([[ 2., 2., 2.],</span>
<span class="sd"> [ 4., 4., 4.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (z**y).asnumpy()</span>
<span class="sd"> array([[ 1.],</span>
<span class="sd"> [ 4.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">base</span><span class="p">,</span>
<span class="n">exp</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_power</span><span class="p">,</span>
<span class="n">operator</span><span class="o">.</span><span class="n">pow</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_power_scalar</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_rpower_scalar</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="maximum"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.maximum">[docs]</a><span class="k">def</span> <span class="nf">maximum</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns element-wise maximum of the input arrays with broadcasting.</span>
<span class="sd"> Equivalent to ``mx.nd.broadcast_maximum(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First array to be compared.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second array to be compared. If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> The element-wise maximum of the input arrays.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.maximum(x, 2).asnumpy()</span>
<span class="sd"> array([[ 2., 2., 2.],</span>
<span class="sd"> [ 2., 2., 2.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.maximum(x, y).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.maximum(y, z).asnumpy()</span>
<span class="sd"> array([[ 0., 1.],</span>
<span class="sd"> [ 1., 1.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_maximum</span><span class="p">,</span>
<span class="k">lambda</span> <span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">:</span> <span class="n">x</span> <span class="k">if</span> <span class="n">x</span> <span class="o">&gt;</span> <span class="n">y</span> <span class="k">else</span> <span class="n">y</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_maximum_scalar</span><span class="p">,</span>
<span class="kc">None</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="minimum"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.minimum">[docs]</a><span class="k">def</span> <span class="nf">minimum</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns element-wise minimum of the input arrays with broadcasting.</span>
<span class="sd"> Equivalent to ``mx.nd.broadcast_minimum(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First array to be compared.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second array to be compared. If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> The element-wise minimum of the input arrays.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.minimum(x, 2).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.minimum(x, y).asnumpy()</span>
<span class="sd"> array([[ 0., 0., 0.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.minimum(z, y).asnumpy()</span>
<span class="sd"> array([[ 0., 0.],</span>
<span class="sd"> [ 0., 1.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_minimum</span><span class="p">,</span>
<span class="k">lambda</span> <span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">:</span> <span class="n">x</span> <span class="k">if</span> <span class="n">x</span> <span class="o">&lt;</span> <span class="n">y</span> <span class="k">else</span> <span class="n">y</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_minimum_scalar</span><span class="p">,</span>
<span class="kc">None</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="equal"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.equal">[docs]</a><span class="k">def</span> <span class="nf">equal</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns the result of element-wise **equal to** (==) comparison operation with</span>
<span class="sd"> broadcasting.</span>
<span class="sd"> For each element in input arrays, return 1(true) if corresponding elements are same,</span>
<span class="sd"> otherwise return 0(false).</span>
<span class="sd"> Equivalent to ``lhs == rhs`` and ``mx.nd.broadcast_equal(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First array to be compared.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second array to be compared. If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> Output array of boolean values.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x == 1).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x == y).asnumpy()</span>
<span class="sd"> array([[ 0., 0., 0.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.equal(x,y).asnumpy()</span>
<span class="sd"> array([[ 0., 0., 0.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (z == y).asnumpy()</span>
<span class="sd"> array([[ 1., 0.],</span>
<span class="sd"> [ 0., 1.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_equal</span><span class="p">,</span>
<span class="k">lambda</span> <span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">:</span> <span class="mi">1</span> <span class="k">if</span> <span class="n">x</span> <span class="o">==</span> <span class="n">y</span> <span class="k">else</span> <span class="mi">0</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_equal_scalar</span><span class="p">,</span>
<span class="kc">None</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="not_equal"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.not_equal">[docs]</a><span class="k">def</span> <span class="nf">not_equal</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns the result of element-wise **not equal to** (!=) comparison operation</span>
<span class="sd"> with broadcasting.</span>
<span class="sd"> For each element in input arrays, return 1(true) if corresponding elements are different,</span>
<span class="sd"> otherwise return 0(false).</span>
<span class="sd"> Equivalent to ``lhs != rhs`` and ``mx.nd.broadcast_not_equal(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First array to be compared.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second array to be compared. If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> Output array of boolean values.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (z == y).asnumpy()</span>
<span class="sd"> array([[ 1., 0.],</span>
<span class="sd"> [ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x != 1).asnumpy()</span>
<span class="sd"> array([[ 0., 0., 0.],</span>
<span class="sd"> [ 0., 0., 0.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x != y).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 0., 0., 0.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.not_equal(x, y).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 0., 0., 0.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (z != y).asnumpy()</span>
<span class="sd"> array([[ 0., 1.],</span>
<span class="sd"> [ 1., 0.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_not_equal</span><span class="p">,</span>
<span class="k">lambda</span> <span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">:</span> <span class="mi">1</span> <span class="k">if</span> <span class="n">x</span> <span class="o">!=</span> <span class="n">y</span> <span class="k">else</span> <span class="mi">0</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_not_equal_scalar</span><span class="p">,</span>
<span class="kc">None</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="greater"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.greater">[docs]</a><span class="k">def</span> <span class="nf">greater</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns the result of element-wise **greater than** (&gt;) comparison operation</span>
<span class="sd"> with broadcasting.</span>
<span class="sd"> For each element in input arrays, return 1(true) if lhs elements are greater than rhs,</span>
<span class="sd"> otherwise return 0(false).</span>
<span class="sd"> Equivalent to ``lhs &gt; rhs`` and ``mx.nd.broadcast_greater(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First array to be compared.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second array to be compared. If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> Output array of boolean values.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x &gt; 1).asnumpy()</span>
<span class="sd"> array([[ 0., 0., 0.],</span>
<span class="sd"> [ 0., 0., 0.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x &gt; y).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 0., 0., 0.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.greater(x, y).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 0., 0., 0.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (z &gt; y).asnumpy()</span>
<span class="sd"> array([[ 0., 1.],</span>
<span class="sd"> [ 0., 0.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_greater</span><span class="p">,</span>
<span class="k">lambda</span> <span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">:</span> <span class="mi">1</span> <span class="k">if</span> <span class="n">x</span> <span class="o">&gt;</span> <span class="n">y</span> <span class="k">else</span> <span class="mi">0</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_greater_scalar</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_lesser_scalar</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="greater_equal"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.greater_equal">[docs]</a><span class="k">def</span> <span class="nf">greater_equal</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns the result of element-wise **greater than or equal to** (&gt;=) comparison</span>
<span class="sd"> operation with broadcasting.</span>
<span class="sd"> For each element in input arrays, return 1(true) if lhs elements are greater than equal to rhs,</span>
<span class="sd"> otherwise return 0(false).</span>
<span class="sd"> Equivalent to ``lhs &gt;= rhs`` and ``mx.nd.broadcast_greater_equal(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First array to be compared.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second array to be compared. If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> Output array of boolean values.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x &gt;= 1).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x &gt;= y).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.greater_equal(x, y).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (z &gt;= y).asnumpy()</span>
<span class="sd"> array([[ 1., 1.],</span>
<span class="sd"> [ 0., 1.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_greater_equal</span><span class="p">,</span>
<span class="k">lambda</span> <span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">:</span> <span class="mi">1</span> <span class="k">if</span> <span class="n">x</span> <span class="o">&gt;=</span> <span class="n">y</span> <span class="k">else</span> <span class="mi">0</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_greater_equal_scalar</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_lesser_equal_scalar</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="lesser"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.lesser">[docs]</a><span class="k">def</span> <span class="nf">lesser</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns the result of element-wise **lesser than** (&lt;) comparison operation</span>
<span class="sd"> with broadcasting.</span>
<span class="sd"> For each element in input arrays, return 1(true) if lhs elements are less than rhs,</span>
<span class="sd"> otherwise return 0(false).</span>
<span class="sd"> Equivalent to ``lhs &lt; rhs`` and ``mx.nd.broadcast_lesser(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First array to be compared.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second array to be compared. If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> Output array of boolean values.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x &lt; 1).asnumpy()</span>
<span class="sd"> array([[ 0., 0., 0.],</span>
<span class="sd"> [ 0., 0., 0.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x &lt; y).asnumpy()</span>
<span class="sd"> array([[ 0., 0., 0.],</span>
<span class="sd"> [ 0., 0., 0.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.lesser(x, y).asnumpy()</span>
<span class="sd"> array([[ 0., 0., 0.],</span>
<span class="sd"> [ 0., 0., 0.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (z &lt; y).asnumpy()</span>
<span class="sd"> array([[ 0., 0.],</span>
<span class="sd"> [ 1., 0.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_lesser</span><span class="p">,</span>
<span class="k">lambda</span> <span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">:</span> <span class="mi">1</span> <span class="k">if</span> <span class="n">x</span> <span class="o">&lt;</span> <span class="n">y</span> <span class="k">else</span> <span class="mi">0</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_lesser_scalar</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_greater_scalar</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="lesser_equal"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.lesser_equal">[docs]</a><span class="k">def</span> <span class="nf">lesser_equal</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns the result of element-wise **lesser than or equal to** (&lt;=) comparison</span>
<span class="sd"> operation with broadcasting.</span>
<span class="sd"> For each element in input arrays, return 1(true) if lhs elements are</span>
<span class="sd"> lesser than equal to rhs, otherwise return 0(false).</span>
<span class="sd"> Equivalent to ``lhs &lt;= rhs`` and ``mx.nd.broadcast_lesser_equal(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First array to be compared.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second array to be compared. If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> Output array of boolean values.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x &lt;= 1).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (x &lt;= y).asnumpy()</span>
<span class="sd"> array([[ 0., 0., 0.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.lesser_equal(x, y).asnumpy()</span>
<span class="sd"> array([[ 0., 0., 0.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; (z &lt;= y).asnumpy()</span>
<span class="sd"> array([[ 1., 0.],</span>
<span class="sd"> [ 1., 1.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_lesser_equal</span><span class="p">,</span>
<span class="k">lambda</span> <span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">:</span> <span class="mi">1</span> <span class="k">if</span> <span class="n">x</span> <span class="o">&lt;=</span> <span class="n">y</span> <span class="k">else</span> <span class="mi">0</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_lesser_equal_scalar</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_greater_equal_scalar</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="logical_and"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.logical_and">[docs]</a><span class="k">def</span> <span class="nf">logical_and</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns the result of element-wise **logical and** comparison</span>
<span class="sd"> operation with broadcasting.</span>
<span class="sd"> For each element in input arrays, return 1(true) if lhs elements and rhs elements</span>
<span class="sd"> are true, otherwise return 0(false).</span>
<span class="sd"> Equivalent to ``lhs and rhs`` and ``mx.nd.broadcast_logical_and(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First input of the function.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second input of the function. If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> Output array of boolean values.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.logical_and(x, 1).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.logical_and(x, y).asnumpy()</span>
<span class="sd"> array([[ 0., 0., 0.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.logical_and(z, y).asnumpy()</span>
<span class="sd"> array([[ 0., 0.],</span>
<span class="sd"> [ 0., 1.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_logical_and</span><span class="p">,</span>
<span class="k">lambda</span> <span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">:</span> <span class="mi">1</span> <span class="k">if</span> <span class="n">x</span> <span class="ow">and</span> <span class="n">y</span> <span class="k">else</span> <span class="mi">0</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_logical_and_scalar</span><span class="p">,</span>
<span class="kc">None</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="logical_or"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.logical_or">[docs]</a><span class="k">def</span> <span class="nf">logical_or</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns the result of element-wise **logical or** comparison</span>
<span class="sd"> operation with broadcasting.</span>
<span class="sd"> For each element in input arrays, return 1(true) if lhs elements or rhs elements</span>
<span class="sd"> are true, otherwise return 0(false).</span>
<span class="sd"> Equivalent to ``lhs or rhs`` and ``mx.nd.broadcast_logical_or(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First input of the function.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second input of the function. If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> Output array of boolean values.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.logical_or(x, 1).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.logical_or(x, y).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.logical_or(z, y).asnumpy()</span>
<span class="sd"> array([[ 0., 1.],</span>
<span class="sd"> [ 1., 1.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_logical_or</span><span class="p">,</span>
<span class="k">lambda</span> <span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">:</span> <span class="mi">1</span> <span class="k">if</span> <span class="n">x</span> <span class="ow">or</span> <span class="n">y</span> <span class="k">else</span> <span class="mi">0</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_logical_or_scalar</span><span class="p">,</span>
<span class="kc">None</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="logical_xor"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.logical_xor">[docs]</a><span class="k">def</span> <span class="nf">logical_xor</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns the result of element-wise **logical xor** comparison</span>
<span class="sd"> operation with broadcasting.</span>
<span class="sd"> For each element in input arrays, return 1(true) if lhs elements or rhs elements</span>
<span class="sd"> are true, otherwise return 0(false).</span>
<span class="sd"> Equivalent to ``bool(lhs) ^ bool(rhs)`` and ``mx.nd.broadcast_logical_xor(lhs, rhs)``.</span>
<span class="sd"> .. note::</span>
<span class="sd"> If the corresponding dimensions of two arrays have the same size or one of them has size 1,</span>
<span class="sd"> then the arrays are broadcastable to a common shape.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> lhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> First input of the function.</span>
<span class="sd"> rhs : scalar or mxnet.ndarray.array</span>
<span class="sd"> Second input of the function. If ``lhs.shape != rhs.shape``, they must be</span>
<span class="sd"> broadcastable to a common shape.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> Output array of boolean values.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="sd"> &gt;&gt;&gt; y = mx.nd.arange(2).reshape((2,1))</span>
<span class="sd"> &gt;&gt;&gt; z = mx.nd.arange(2).reshape((1,2))</span>
<span class="sd"> &gt;&gt;&gt; x.asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 1., 1., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; y.asnumpy()</span>
<span class="sd"> array([[ 0.],</span>
<span class="sd"> [ 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; z.asnumpy()</span>
<span class="sd"> array([[ 0., 1.]], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.logical_xor(x, y).asnumpy()</span>
<span class="sd"> array([[ 1., 1., 1.],</span>
<span class="sd"> [ 0., 0., 0.]], dtype=float32)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_ufunc_helper</span><span class="p">(</span>
<span class="n">lhs</span><span class="p">,</span>
<span class="n">rhs</span><span class="p">,</span>
<span class="n">op</span><span class="o">.</span><span class="n">broadcast_logical_xor</span><span class="p">,</span>
<span class="k">lambda</span> <span class="n">x</span><span class="p">,</span> <span class="n">y</span><span class="p">:</span> <span class="mi">1</span> <span class="k">if</span> <span class="nb">bool</span><span class="p">(</span><span class="n">x</span><span class="p">)</span> <span class="o">^</span> <span class="nb">bool</span><span class="p">(</span><span class="n">y</span><span class="p">)</span> <span class="k">else</span> <span class="mi">0</span><span class="p">,</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_logical_xor_scalar</span><span class="p">,</span>
<span class="kc">None</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="true_divide"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.true_divide">[docs]</a><span class="k">def</span> <span class="nf">true_divide</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;This function is similar to :meth:`divide`.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">return</span> <span class="n">divide</span><span class="p">(</span><span class="n">lhs</span><span class="p">,</span> <span class="n">rhs</span><span class="p">)</span></div>
<div class="viewcode-block" id="concatenate"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.concatenate">[docs]</a><span class="k">def</span> <span class="nf">concatenate</span><span class="p">(</span><span class="n">arrays</span><span class="p">,</span> <span class="n">axis</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">always_copy</span><span class="o">=</span><span class="kc">True</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;DEPRECATED, use ``concat`` instead</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> arrays : list of `NDArray`</span>
<span class="sd"> Arrays to be concatenate. They must have identical shape except</span>
<span class="sd"> the first dimension. They also must have the same data type.</span>
<span class="sd"> axis : int</span>
<span class="sd"> The axis along which to concatenate.</span>
<span class="sd"> always_copy : bool</span>
<span class="sd"> Default `True`. When not `True`, if the arrays only contain one</span>
<span class="sd"> `NDArray`, that element will be returned directly, avoid copying.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> An `NDArray` that lives on the same context as `arrays[0].context`.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># Unsupported in deferred compute mode due to use of inplace operations.</span>
<span class="kn">from</span> <span class="nn">.._deferred_compute</span> <span class="kn">import</span> <span class="n">is_deferred_compute</span> <span class="c1"># pylint: disable=wrong-import-position</span>
<span class="k">assert</span> <span class="ow">not</span> <span class="n">is_deferred_compute</span><span class="p">(),</span> <span class="s1">&#39;nd.concatenate is deprecated and &#39;</span> \
<span class="s1">&#39;unsupported in deferred compute mode. Use nd.concat instead.&#39;</span>
<span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">arrays</span><span class="p">,</span> <span class="nb">list</span><span class="p">)</span>
<span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="n">arrays</span><span class="p">)</span> <span class="o">&gt;</span> <span class="mi">0</span>
<span class="k">assert</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">arrays</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">NDArray</span><span class="p">)</span>
<span class="k">if</span> <span class="ow">not</span> <span class="n">always_copy</span> <span class="ow">and</span> <span class="nb">len</span><span class="p">(</span><span class="n">arrays</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="k">return</span> <span class="n">arrays</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
<span class="n">shape_axis</span> <span class="o">=</span> <span class="n">arrays</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">axis</span><span class="p">]</span>
<span class="n">shape_rest1</span> <span class="o">=</span> <span class="n">arrays</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">:</span><span class="n">axis</span><span class="p">]</span>
<span class="n">shape_rest2</span> <span class="o">=</span> <span class="n">arrays</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">axis</span><span class="o">+</span><span class="mi">1</span><span class="p">:]</span>
<span class="n">dtype</span> <span class="o">=</span> <span class="n">arrays</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">dtype</span>
<span class="k">for</span> <span class="n">arr</span> <span class="ow">in</span> <span class="n">arrays</span><span class="p">[</span><span class="mi">1</span><span class="p">:]:</span>
<span class="n">shape_axis</span> <span class="o">+=</span> <span class="n">arr</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">axis</span><span class="p">]</span>
<span class="k">assert</span> <span class="n">shape_rest1</span> <span class="o">==</span> <span class="n">arr</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">:</span><span class="n">axis</span><span class="p">]</span>
<span class="k">assert</span> <span class="n">shape_rest2</span> <span class="o">==</span> <span class="n">arr</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">axis</span><span class="o">+</span><span class="mi">1</span><span class="p">:]</span>
<span class="k">assert</span> <span class="n">dtype</span> <span class="o">==</span> <span class="n">arr</span><span class="o">.</span><span class="n">dtype</span>
<span class="n">ret_shape</span> <span class="o">=</span> <span class="n">shape_rest1</span> <span class="o">+</span> <span class="p">(</span><span class="n">shape_axis</span><span class="p">,)</span> <span class="o">+</span> <span class="n">shape_rest2</span>
<span class="n">ret</span> <span class="o">=</span> <span class="n">empty</span><span class="p">(</span><span class="n">ret_shape</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="n">arrays</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">ctx</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">dtype</span><span class="p">)</span>
<span class="n">idx</span> <span class="o">=</span> <span class="mi">0</span>
<span class="n">begin</span> <span class="o">=</span> <span class="p">[</span><span class="mi">0</span> <span class="k">for</span> <span class="n">_</span> <span class="ow">in</span> <span class="n">ret_shape</span><span class="p">]</span>
<span class="n">end</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">ret_shape</span><span class="p">)</span>
<span class="k">for</span> <span class="n">arr</span> <span class="ow">in</span> <span class="n">arrays</span><span class="p">:</span>
<span class="k">if</span> <span class="n">axis</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="n">ret</span><span class="p">[</span><span class="n">idx</span><span class="p">:</span><span class="n">idx</span><span class="o">+</span><span class="n">arr</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]]</span> <span class="o">=</span> <span class="n">arr</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">begin</span><span class="p">[</span><span class="n">axis</span><span class="p">]</span> <span class="o">=</span> <span class="n">idx</span>
<span class="n">end</span><span class="p">[</span><span class="n">axis</span><span class="p">]</span> <span class="o">=</span> <span class="n">idx</span><span class="o">+</span><span class="n">arr</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">axis</span><span class="p">]</span>
<span class="c1"># pylint: disable=no-member,protected-access</span>
<span class="n">_internal</span><span class="o">.</span><span class="n">_crop_assign</span><span class="p">(</span><span class="n">ret</span><span class="p">,</span> <span class="n">arr</span><span class="p">,</span> <span class="n">out</span><span class="o">=</span><span class="n">ret</span><span class="p">,</span>
<span class="n">begin</span><span class="o">=</span><span class="nb">tuple</span><span class="p">(</span><span class="n">begin</span><span class="p">),</span>
<span class="n">end</span><span class="o">=</span><span class="nb">tuple</span><span class="p">(</span><span class="n">end</span><span class="p">))</span>
<span class="c1"># pylint: enable=no-member,protected-access</span>
<span class="n">idx</span> <span class="o">+=</span> <span class="n">arr</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">axis</span><span class="p">]</span>
<span class="k">return</span> <span class="n">ret</span></div>
<span class="c1"># pylint: disable=redefined-outer-name</span>
<div class="viewcode-block" id="imdecode"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.imdecode">[docs]</a><span class="k">def</span> <span class="nf">imdecode</span><span class="p">(</span><span class="n">str_img</span><span class="p">,</span> <span class="n">clip_rect</span><span class="o">=</span><span class="p">(</span><span class="mi">0</span><span class="p">,</span> <span class="mi">0</span><span class="p">,</span> <span class="mi">0</span><span class="p">,</span> <span class="mi">0</span><span class="p">),</span> <span class="n">out</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">index</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">channels</span><span class="o">=</span><span class="mi">3</span><span class="p">,</span> <span class="n">mean</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;DEPRECATED, use mx.img instead</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> str_img : str</span>
<span class="sd"> Binary image data</span>
<span class="sd"> clip_rect : iterable of 4 int</span>
<span class="sd"> Clip decoded image to rectangle (x0, y0, x1, y1).</span>
<span class="sd"> out : NDArray</span>
<span class="sd"> Output buffer. Can be 3 dimensional (c, h, w) or 4 dimensional (n, c, h, w).</span>
<span class="sd"> index : int</span>
<span class="sd"> Output decoded image to i-th slice of 4 dimensional buffer.</span>
<span class="sd"> channels : int</span>
<span class="sd"> Number of channels to output. Decode to grey scale when channels = 1.</span>
<span class="sd"> mean : NDArray</span>
<span class="sd"> Subtract mean from decode image before outputing.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access, too-many-arguments</span>
<span class="k">if</span> <span class="n">mean</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">mean</span> <span class="o">=</span> <span class="n">NDArray</span><span class="p">(</span><span class="n">_new_empty_handle</span><span class="p">())</span>
<span class="k">if</span> <span class="n">out</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_imdecode</span><span class="p">(</span><span class="n">mean</span><span class="p">,</span> <span class="n">index</span><span class="p">,</span>
<span class="n">clip_rect</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span>
<span class="n">clip_rect</span><span class="p">[</span><span class="mi">1</span><span class="p">],</span>
<span class="n">clip_rect</span><span class="p">[</span><span class="mi">2</span><span class="p">],</span>
<span class="n">clip_rect</span><span class="p">[</span><span class="mi">3</span><span class="p">],</span>
<span class="n">channels</span><span class="p">,</span>
<span class="nb">len</span><span class="p">(</span><span class="n">str_img</span><span class="p">),</span>
<span class="n">str_img</span><span class="o">=</span><span class="n">str_img</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_imdecode</span><span class="p">(</span><span class="n">mean</span><span class="p">,</span> <span class="n">index</span><span class="p">,</span>
<span class="n">clip_rect</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span>
<span class="n">clip_rect</span><span class="p">[</span><span class="mi">1</span><span class="p">],</span>
<span class="n">clip_rect</span><span class="p">[</span><span class="mi">2</span><span class="p">],</span>
<span class="n">clip_rect</span><span class="p">[</span><span class="mi">3</span><span class="p">],</span>
<span class="n">channels</span><span class="p">,</span>
<span class="nb">len</span><span class="p">(</span><span class="n">str_img</span><span class="p">),</span>
<span class="n">str_img</span><span class="o">=</span><span class="n">str_img</span><span class="p">,</span>
<span class="n">out</span><span class="o">=</span><span class="n">out</span><span class="p">)</span></div>
<span class="k">def</span> <span class="nf">zeros</span><span class="p">(</span><span class="n">shape</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a new array filled with all zeros, with the given shape and type.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> shape : int or tuple of int</span>
<span class="sd"> The shape of the empty array.</span>
<span class="sd"> ctx : Context, optional</span>
<span class="sd"> An optional device context (default is the current default context).</span>
<span class="sd"> dtype : str or numpy.dtype, optional</span>
<span class="sd"> An optional value type (default is `float32`).</span>
<span class="sd"> out : NDArray, optional</span>
<span class="sd"> The output NDArray (default is `None`).</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> A created array</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.zeros(1).asnumpy()</span>
<span class="sd"> array([ 0.], dtype=float32)</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.zeros((1,2), mx.gpu(0))</span>
<span class="sd"> &lt;NDArray 1x2 @gpu(0)&gt;</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.zeros((1,2), mx.gpu(0), &#39;float16&#39;).asnumpy()</span>
<span class="sd"> array([[ 0., 0.]], dtype=float16)</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= unused-argument</span>
<span class="k">if</span> <span class="n">ctx</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">ctx</span> <span class="o">=</span> <span class="n">current_device</span><span class="p">()</span>
<span class="n">dtype</span> <span class="o">=</span> <span class="n">mx_real_t</span> <span class="k">if</span> <span class="n">dtype</span> <span class="ow">is</span> <span class="kc">None</span> <span class="k">else</span> <span class="n">dtype</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_zeros</span><span class="p">(</span><span class="n">shape</span><span class="o">=</span><span class="n">shape</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="n">ctx</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">dtype</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<div class="viewcode-block" id="eye"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.eye">[docs]</a><span class="k">def</span> <span class="nf">eye</span><span class="p">(</span><span class="n">N</span><span class="p">,</span> <span class="n">M</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">k</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Return a 2-D array with ones on the diagonal and zeros elsewhere.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> N: int</span>
<span class="sd"> Number of rows in the output.</span>
<span class="sd"> M: int, optional</span>
<span class="sd"> Number of columns in the output. If 0, defaults to N.</span>
<span class="sd"> k: int, optional</span>
<span class="sd"> Index of the diagonal: 0 (the default) refers to the main diagonal,</span>
<span class="sd"> a positive value refers to an upper diagonal,</span>
<span class="sd"> and a negative value to a lower diagonal.</span>
<span class="sd"> ctx: Context, optional</span>
<span class="sd"> An optional device context (default is the current default context)</span>
<span class="sd"> dtype: str or numpy.dtype, optional</span>
<span class="sd"> An optional value type (default is `float32`)</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> A created array</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.eye(2)</span>
<span class="sd"> [[ 1. 0.]</span>
<span class="sd"> [ 0. 1.]]</span>
<span class="sd"> &lt;NDArray 2x2 @cpu(0)&gt;</span>
<span class="sd"> &gt;&gt;&gt; mx.nd.eye(2, 3, 1)</span>
<span class="sd"> [[ 0. 1. 0.]</span>
<span class="sd"> [ 0. 0. 1.]]</span>
<span class="sd"> &lt;NDArray 2x3 @cpu(0)&gt;</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= unused-argument</span>
<span class="k">if</span> <span class="n">ctx</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">ctx</span> <span class="o">=</span> <span class="n">current_device</span><span class="p">()</span>
<span class="n">dtype</span> <span class="o">=</span> <span class="n">mx_real_t</span> <span class="k">if</span> <span class="n">dtype</span> <span class="ow">is</span> <span class="kc">None</span> <span class="k">else</span> <span class="n">dtype</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_eye</span><span class="p">(</span><span class="n">N</span><span class="o">=</span><span class="n">N</span><span class="p">,</span> <span class="n">M</span><span class="o">=</span><span class="n">M</span><span class="p">,</span> <span class="n">k</span><span class="o">=</span><span class="n">k</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="n">ctx</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">dtype</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access</span>
<span class="k">def</span> <span class="nf">empty</span><span class="p">(</span><span class="n">shape</span><span class="p">,</span> <span class="n">ctx</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Returns a new array of given shape and type, without initializing entries.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> shape : int or tuple of int</span>
<span class="sd"> The shape of the empty array.</span>
<span class="sd"> ctx : Context, optional</span>
<span class="sd"> An optional device context (default is the current default context).</span>
<span class="sd"> dtype : str or numpy.dtype, optional</span>
<span class="sd"> An optional value type (default is `float32`).</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> A created array.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">shape</span><span class="p">,</span> <span class="nb">int</span><span class="p">):</span>
<span class="n">shape</span> <span class="o">=</span> <span class="p">(</span><span class="n">shape</span><span class="p">,</span> <span class="p">)</span>
<span class="k">if</span> <span class="n">ctx</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">ctx</span> <span class="o">=</span> <span class="n">current_device</span><span class="p">()</span>
<span class="k">if</span> <span class="n">dtype</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">dtype</span> <span class="o">=</span> <span class="n">mx_real_t</span>
<span class="k">return</span> <span class="n">NDArray</span><span class="p">(</span><span class="n">handle</span><span class="o">=</span><span class="n">_new_alloc_handle</span><span class="p">(</span><span class="n">shape</span><span class="p">,</span> <span class="n">ctx</span><span class="p">,</span> <span class="kc">False</span><span class="p">,</span> <span class="n">dtype</span><span class="p">))</span>
<span class="c1"># pylint: disable= redefined-builtin</span>
<div class="viewcode-block" id="histogram"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.histogram">[docs]</a><span class="k">def</span> <span class="nf">histogram</span><span class="p">(</span><span class="n">a</span><span class="p">,</span> <span class="n">bins</span><span class="o">=</span><span class="mi">10</span><span class="p">,</span> <span class="nb">range</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Compute the histogram of the input data.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> a : NDArray</span>
<span class="sd"> Input data. The histogram is computed over the flattened array.</span>
<span class="sd"> bins : int or sequence of scalars</span>
<span class="sd"> If bins is an int, it defines the number of equal-width bins in the</span>
<span class="sd"> given range (10, by default). If bins is a sequence, it defines the bin edges,</span>
<span class="sd"> including the rightmost edge, allowing for non-uniform bin widths.</span>
<span class="sd"> range : (float, float), optional</span>
<span class="sd"> The lower and upper range of the bins. If not provided, range is simply (a.min(), a.max()).</span>
<span class="sd"> Values outside the range are ignored. The first element of the range must be less than or</span>
<span class="sd"> equal to the second. range affects the automatic bin computation as well, the range will</span>
<span class="sd"> be equally divided by the number of bins.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> A created array.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="c1"># pylint: disable= no-member, protected-access</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">bins</span><span class="p">,</span> <span class="n">NDArray</span><span class="p">):</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_histogram</span><span class="p">(</span><span class="n">data</span><span class="o">=</span><span class="n">a</span><span class="p">,</span> <span class="n">bins</span><span class="o">=</span><span class="n">bins</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">bins</span><span class="p">,</span> <span class="n">integer_types</span><span class="p">):</span>
<span class="k">if</span> <span class="nb">range</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">warnings</span><span class="o">.</span><span class="n">warn</span><span class="p">(</span><span class="s2">&quot;range is not specified, using numpy&#39;s result &quot;</span>
<span class="s2">&quot;to ensure consistency with numpy&quot;</span><span class="p">)</span>
<span class="n">res</span><span class="p">,</span> <span class="n">bin_bounds</span> <span class="o">=</span> <span class="n">np</span><span class="o">.</span><span class="n">histogram</span><span class="p">(</span><span class="n">a</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">(),</span> <span class="n">bins</span><span class="o">=</span><span class="n">bins</span><span class="p">)</span>
<span class="k">return</span> <span class="n">array</span><span class="p">(</span><span class="n">res</span><span class="p">),</span> <span class="n">array</span><span class="p">(</span><span class="n">bin_bounds</span><span class="p">)</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_histogram</span><span class="p">(</span><span class="n">data</span><span class="o">=</span><span class="n">a</span><span class="p">,</span> <span class="n">bin_cnt</span><span class="o">=</span><span class="n">bins</span><span class="p">,</span> <span class="nb">range</span><span class="o">=</span><span class="nb">range</span><span class="p">)</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">&quot;bins argument should be either an integer or an NDArray&quot;</span><span class="p">)</span></div>
<span class="c1"># pylint: enable= no-member, protected-access, redefined-builtin</span>
<div class="viewcode-block" id="split_v2"><a class="viewcode-back" href="../../../api/legacy/ndarray/ndarray.html#mxnet.ndarray.split_v2">[docs]</a><span class="k">def</span> <span class="nf">split_v2</span><span class="p">(</span><span class="n">ary</span><span class="p">,</span> <span class="n">indices_or_sections</span><span class="p">,</span> <span class="n">axis</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span> <span class="n">squeeze_axis</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
<span class="w"> </span><span class="sd">&quot;&quot;&quot;Split an array into multiple sub-arrays.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> ary : NDArray</span>
<span class="sd"> Array to be divided into sub-arrays.</span>
<span class="sd"> indices_or_sections : int or tuple of ints</span>
<span class="sd"> If `indices_or_sections` is an integer, N, the array will be divided</span>
<span class="sd"> into N equal arrays along `axis`. If such a split is not possible,</span>
<span class="sd"> an error is raised.</span>
<span class="sd"> If `indices_or_sections` is a 1-D array of sorted integers, the entries</span>
<span class="sd"> indicate where along `axis` the array is split. For example,</span>
<span class="sd"> ``[2, 3]`` would, for ``axis=0``, result in</span>
<span class="sd"> - ary[:2]</span>
<span class="sd"> - ary[2:3]</span>
<span class="sd"> - ary[3:]</span>
<span class="sd"> If an index exceeds the dimension of the array along `axis`,</span>
<span class="sd"> an empty sub-array is returned correspondingly.</span>
<span class="sd"> axis : int, optional</span>
<span class="sd"> The axis along which to split, default is 0.</span>
<span class="sd"> squeeze_axis: boolean, optional</span>
<span class="sd"> Whether to squeeze the axis of sub-arrays or not, only useful when size</span>
<span class="sd"> of the sub-arrays are 1 on the `axis`. Default is False.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> NDArray</span>
<span class="sd"> A created array.</span>
<span class="sd"> &quot;&quot;&quot;</span>
<span class="n">indices</span> <span class="o">=</span> <span class="p">[]</span>
<span class="n">axis_size</span> <span class="o">=</span> <span class="n">ary</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="n">axis</span><span class="p">]</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">indices_or_sections</span><span class="p">,</span> <span class="nb">int</span><span class="p">):</span>
<span class="n">sections</span> <span class="o">=</span> <span class="n">indices_or_sections</span>
<span class="k">if</span> <span class="n">axis_size</span> <span class="o">%</span> <span class="n">sections</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;array split does not result in an equal division&#39;</span><span class="p">)</span>
<span class="n">section_size</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">axis_size</span> <span class="o">/</span> <span class="n">sections</span><span class="p">)</span>
<span class="n">indices</span> <span class="o">=</span> <span class="p">[</span><span class="n">i</span> <span class="o">*</span> <span class="n">section_size</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">sections</span><span class="p">)]</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">indices_or_sections</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">):</span>
<span class="n">indices</span> <span class="o">=</span> <span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">+</span> <span class="nb">list</span><span class="p">(</span><span class="n">indices_or_sections</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s1">&#39;indices_or_sections must either int or tuple of ints&#39;</span><span class="p">)</span>
<span class="k">return</span> <span class="n">_internal</span><span class="o">.</span><span class="n">_split_v2</span><span class="p">(</span><span class="n">ary</span><span class="p">,</span> <span class="n">indices</span><span class="p">,</span> <span class="n">axis</span><span class="p">,</span> <span class="n">squeeze_axis</span><span class="p">)</span></div>
<span class="n">from_dlpack</span> <span class="o">=</span> <span class="n">ndarray_from_dlpack</span><span class="p">(</span><span class="n">NDArray</span><span class="p">)</span>
<span class="n">from_dlpack_doc</span> <span class="o">=</span> <span class="s2">&quot;&quot;&quot;Returns a NDArray backed by a dlpack tensor.</span>
<span class="s2"> Parameters</span>
<span class="s2"> ----------</span>
<span class="s2"> dlpack: PyCapsule (the pointer of DLManagedTensor)</span>
<span class="s2"> input data</span>
<span class="s2"> Returns</span>
<span class="s2"> -------</span>
<span class="s2"> NDArray</span>
<span class="s2"> a NDArray backed by a dlpack tensor</span>
<span class="s2"> Examples</span>
<span class="s2"> --------</span>
<span class="s2"> &gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="s2"> &gt;&gt;&gt; y = mx.nd.to_dlpack_for_read(x)</span>
<span class="s2"> &gt;&gt;&gt; type(y)</span>
<span class="s2"> &lt;class &#39;PyCapsule&#39;&gt;</span>
<span class="s2"> &gt;&gt;&gt; z = mx.nd.from_dlpack(y)</span>
<span class="s2"> &gt;&gt;&gt; type(z)</span>
<span class="s2"> &lt;class &#39;mxnet.ndarray.ndarray.NDArray&#39;&gt;</span>
<span class="s2"> &gt;&gt;&gt; z</span>
<span class="s2"> [[ 1. 1. 1.]</span>
<span class="s2"> [ 1. 1. 1.]]</span>
<span class="s2"> &lt;NDArray 2x3 @cpu(0)&gt;</span>
<span class="s2"> &gt;&gt;&gt; w = mx.nd.to_dlpack_for_write(x)</span>
<span class="s2"> &gt;&gt;&gt; type(w)</span>
<span class="s2"> &lt;class &#39;PyCapsule&#39;&gt;</span>
<span class="s2"> &gt;&gt;&gt; u = mx.nd.from_dlpack(w)</span>
<span class="s2"> &gt;&gt;&gt; u += 1</span>
<span class="s2"> &gt;&gt;&gt; x</span>
<span class="s2"> [[2. 2. 2.]</span>
<span class="s2"> [2. 2. 2.]]</span>
<span class="s2"> &lt;NDArray 2x3 @cpu(0)&gt;</span>
<span class="s2"> &quot;&quot;&quot;</span>
<span class="n">from_dlpack</span><span class="o">.</span><span class="vm">__doc__</span> <span class="o">=</span> <span class="n">from_dlpack_doc</span>
<span class="n">from_numpy</span> <span class="o">=</span> <span class="n">ndarray_from_numpy</span><span class="p">(</span><span class="n">NDArray</span><span class="p">,</span> <span class="n">array</span><span class="p">)</span>
<span class="n">from_numpy_doc</span> <span class="o">=</span> <span class="s2">&quot;&quot;&quot;Returns an MXNet&#39;s NDArray backed by numpy&#39;s ndarray.</span>
<span class="s2"> When `zero_copy` is set to be true,</span>
<span class="s2"> this API consumes numpy&#39;s ndarray and produces MXNet&#39;s ndarray</span>
<span class="s2"> without having to copy the content. In this case, we disallow</span>
<span class="s2"> users to modify the given numpy ndarray, and it is suggested</span>
<span class="s2"> not to read the numpy ndarray as well for internal correctness.</span>
<span class="s2"> Parameters</span>
<span class="s2"> ----------</span>
<span class="s2"> ndarray: NDArray</span>
<span class="s2"> input data</span>
<span class="s2"> zero_copy: bool</span>
<span class="s2"> Whether we use DLPack&#39;s zero-copy conversion to convert to MXNet&#39;s NDArray.</span>
<span class="s2"> This is only available for c-contiguous arrays, i.e. array.flags[C_CONTIGUOUS] == True.</span>
<span class="s2"> Returns</span>
<span class="s2"> -------</span>
<span class="s2"> NDArray</span>
<span class="s2"> a NDArray backed by a dlpack tensor</span>
<span class="s2">&quot;&quot;&quot;</span>
<span class="n">from_numpy</span><span class="o">.</span><span class="vm">__doc__</span> <span class="o">=</span> <span class="n">from_numpy_doc</span>
<span class="n">to_dlpack_for_read</span> <span class="o">=</span> <span class="n">ndarray_to_dlpack_for_read</span><span class="p">()</span>
<span class="n">to_dlpack_for_read_doc</span> <span class="o">=</span> <span class="s2">&quot;&quot;&quot;Returns a reference view of NDArray that represents as DLManagedTensor until</span>
<span class="s2">all previous write operations on the current array are finished.</span>
<span class="s2">Parameters</span>
<span class="s2">----------</span>
<span class="s2">data: NDArray</span>
<span class="s2"> input data.</span>
<span class="s2">Returns</span>
<span class="s2">-------</span>
<span class="s2">PyCapsule (the pointer of DLManagedTensor)</span>
<span class="s2"> a reference view of NDArray that represents as DLManagedTensor.</span>
<span class="s2">Examples</span>
<span class="s2">--------</span>
<span class="s2">&gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="s2">&gt;&gt;&gt; y = mx.nd.to_dlpack_for_read(x)</span>
<span class="s2">&gt;&gt;&gt; type(y)</span>
<span class="s2">&lt;class &#39;PyCapsule&#39;&gt;</span>
<span class="s2">&gt;&gt;&gt; z = mx.nd.from_dlpack(y)</span>
<span class="s2">&gt;&gt;&gt; z</span>
<span class="s2">[[1. 1. 1.]</span>
<span class="s2"> [1. 1. 1.]]</span>
<span class="s2">&lt;NDArray 2x3 @cpu(0)&gt;</span>
<span class="s2">&quot;&quot;&quot;</span>
<span class="n">to_dlpack_for_read</span><span class="o">.</span><span class="vm">__doc__</span> <span class="o">=</span> <span class="n">to_dlpack_for_read_doc</span>
<span class="n">to_dlpack_for_write</span> <span class="o">=</span> <span class="n">ndarray_to_dlpack_for_write</span><span class="p">()</span>
<span class="n">to_dlpack_for_write_doc</span> <span class="o">=</span> <span class="s2">&quot;&quot;&quot;Returns a reference view of NDArray that represents as</span>
<span class="s2">DLManagedTensor until all previous read/write operations on the current array are finished.</span>
<span class="s2">Parameters</span>
<span class="s2">----------</span>
<span class="s2">data: NDArray</span>
<span class="s2"> input data.</span>
<span class="s2">Returns</span>
<span class="s2">-------</span>
<span class="s2">PyCapsule : the pointer of DLManagedTensor</span>
<span class="s2"> a reference view of NDArray that represents as DLManagedTensor.</span>
<span class="s2">Examples</span>
<span class="s2">--------</span>
<span class="s2">&gt;&gt;&gt; x = mx.nd.ones((2,3))</span>
<span class="s2">&gt;&gt;&gt; w = mx.nd.to_dlpack_for_write(x)</span>
<span class="s2">&gt;&gt;&gt; type(w)</span>
<span class="s2">&lt;class &#39;PyCapsule&#39;&gt;</span>
<span class="s2">&gt;&gt;&gt; u = mx.nd.from_dlpack(w)</span>
<span class="s2">&gt;&gt;&gt; u += 1</span>
<span class="s2">&gt;&gt;&gt; x</span>
<span class="s2">[[2. 2. 2.]</span>
<span class="s2"> [2. 2. 2.]]</span>
<span class="s2">&lt;NDArray 2x3 @cpu(0)&gt;</span>
<span class="s2">&quot;&quot;&quot;</span>
<span class="n">to_dlpack_for_write</span><span class="o">.</span><span class="vm">__doc__</span> <span class="o">=</span> <span class="n">to_dlpack_for_write_doc</span>
</pre></div>
<hr class="feedback-hr-top" />
<div class="feedback-container">
<div class="feedback-question">Did this page help you?</div>
<div class="feedback-answer-container">
<div class="feedback-answer yes-link" data-response="yes">Yes</div>
<div class="feedback-answer no-link" data-response="no">No</div>
</div>
<div class="feedback-thank-you">Thanks for your feedback!</div>
</div>
<hr class="feedback-hr-bottom" />
</div>
<div class="side-doc-outline">
<div class="side-doc-outline--content">
</div>
</div>
<div class="clearer"></div>
</div><div class="pagenation">
</div>
<footer class="site-footer h-card">
<div class="wrapper">
<div class="row">
<div class="col-4">
<h4 class="footer-category-title">Resources</h4>
<ul class="contact-list">
<li><a href="https://lists.apache.org/list.html?dev@mxnet.apache.org">Mailing list</a> <a class="u-email" href="mailto:dev-subscribe@mxnet.apache.org">(subscribe)</a></li>
<li><a href="https://discuss.mxnet.io">MXNet Discuss forum</a></li>
<li><a href="https://github.com/apache/mxnet/issues">Github Issues</a></li>
<li><a href="https://github.com/apache/mxnet/projects">Projects</a></li>
<li><a href="https://cwiki.apache.org/confluence/display/MXNET/Apache+MXNet+Home">Developer Wiki</a></li>
<li><a href="/community">Contribute To MXNet</a></li>
</ul>
</div>
<div class="col-4"><ul class="social-media-list"><li><a href="https://github.com/apache/mxnet"><svg class="svg-icon"><use xlink:href="../../../_static/minima-social-icons.svg#github"></use></svg> <span class="username">apache/mxnet</span></a></li><li><a href="https://www.twitter.com/apachemxnet"><svg class="svg-icon"><use xlink:href="../../../_static/minima-social-icons.svg#twitter"></use></svg> <span class="username">apachemxnet</span></a></li><li><a href="https://youtube.com/apachemxnet"><svg class="svg-icon"><use xlink:href="../../../_static/minima-social-icons.svg#youtube"></use></svg> <span class="username">apachemxnet</span></a></li></ul>
</div>
<div class="col-4 footer-text">
<p>A flexible and efficient library for deep learning.</p>
</div>
</div>
</div>
</footer>
<footer class="site-footer2">
<div class="wrapper">
<div class="row">
<div class="col-3">
<img src="../../../_static/apache_incubator_logo.png" class="footer-logo col-2">
</div>
<div class="footer-bottom-warning col-9">
<p>Apache MXNet is an effort undergoing incubation at <a href="http://www.apache.org/">The Apache Software Foundation</a> (ASF), <span style="font-weight:bold">sponsored by the <i>Apache Incubator</i></span>. Incubation is required
of all newly accepted projects until a further review indicates that the infrastructure,
communications, and decision making process have stabilized in a manner consistent with other
successful ASF projects. While incubation status is not necessarily a reflection of the completeness
or stability of the code, it does indicate that the project has yet to be fully endorsed by the ASF.
</p><p>"Copyright © 2017-2018, The Apache Software Foundation Apache MXNet, MXNet, Apache, the Apache
feather, and the Apache MXNet project logo are either registered trademarks or trademarks of the
Apache Software Foundation."</p>
</div>
</div>
</div>
</footer>
</body>
</html>