Source code for mxnet.metric

<span></span><span class="c1"># Licensed to the Apache Software Foundation (ASF) under one</span>
<span class="c1"># or more contributor license agreements. See the NOTICE file</span>
<span class="c1"># distributed with this work for additional information</span>
<span class="c1"># regarding copyright ownership. The ASF licenses this file</span>
<span class="c1"># to you under the Apache License, Version 2.0 (the</span>
<span class="c1"># "License"); you may not use this file except in compliance</span>
<span class="c1"># with the License. You may obtain a copy of the License at</span>
<span class="c1">#</span>
<span class="c1"># http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="c1">#</span>
<span class="c1"># Unless required by applicable law or agreed to in writing,</span>
<span class="c1"># software distributed under the License is distributed on an</span>
<span class="c1"># "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY</span>
<span class="c1"># KIND, either express or implied. See the License for the</span>
<span class="c1"># specific language governing permissions and limitations</span>
<span class="c1"># under the License.</span>
<span class="c1"># coding: utf-8</span>
<span class="c1"># pylint: disable=no-member, too-many-lines</span>
<span class="sd">"""Online evaluation metric module."""</span>
<span class="kn">from</span> <span class="nn">__future__</span> <span class="k">import</span> <span class="n">absolute_import</span>
<span class="kn">import</span> <span class="nn">math</span>
<span class="kn">from</span> <span class="nn">collections</span> <span class="k">import</span> <span class="n">OrderedDict</span>
<span class="kn">import</span> <span class="nn">numpy</span>
<span class="kn">from</span> <span class="nn">.base</span> <span class="k">import</span> <span class="n">numeric_types</span><span class="p">,</span> <span class="n">string_types</span>
<span class="kn">from</span> <span class="nn">.</span> <span class="k">import</span> <span class="n">ndarray</span>
<span class="kn">from</span> <span class="nn">.</span> <span class="k">import</span> <span class="n">registry</span>
<div class="viewcode-block" id="check_label_shapes"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.check_label_shapes">[docs]</a><span class="k">def</span> <span class="nf">check_label_shapes</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">,</span> <span class="n">wrap</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">shape</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
<span class="sd">"""Helper function for checking shape of label and prediction</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> wrap : boolean</span>
<span class="sd"> If True, wrap labels/preds in a list if they are single NDArray</span>
<span class="sd"> shape : boolean</span>
<span class="sd"> If True, check the shape of labels and preds;</span>
<span class="sd"> Otherwise only check their length.</span>
<span class="sd"> """</span>
<span class="k">if</span> <span class="ow">not</span> <span class="n">shape</span><span class="p">:</span>
<span class="n">label_shape</span><span class="p">,</span> <span class="n">pred_shape</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">labels</span><span class="p">),</span> <span class="nb">len</span><span class="p">(</span><span class="n">preds</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">label_shape</span><span class="p">,</span> <span class="n">pred_shape</span> <span class="o">=</span> <span class="n">labels</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">preds</span><span class="o">.</span><span class="n">shape</span>
<span class="k">if</span> <span class="n">label_shape</span> <span class="o">!=</span> <span class="n">pred_shape</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">"Shape of labels </span><span class="si">{}</span><span class="s2"> does not match shape of "</span>
<span class="s2">"predictions </span><span class="si">{}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">label_shape</span><span class="p">,</span> <span class="n">pred_shape</span><span class="p">))</span>
<span class="k">if</span> <span class="n">wrap</span><span class="p">:</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">ndarray</span><span class="o">.</span><span class="n">ndarray</span><span class="o">.</span><span class="n">NDArray</span><span class="p">):</span>
<span class="n">labels</span> <span class="o">=</span> <span class="p">[</span><span class="n">labels</span><span class="p">]</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">preds</span><span class="p">,</span> <span class="n">ndarray</span><span class="o">.</span><span class="n">ndarray</span><span class="o">.</span><span class="n">NDArray</span><span class="p">):</span>
<span class="n">preds</span> <span class="o">=</span> <span class="p">[</span><span class="n">preds</span><span class="p">]</span>
<span class="k">return</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span></div>
<div class="viewcode-block" id="EvalMetric"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.EvalMetric">[docs]</a><span class="k">class</span> <span class="nc">EvalMetric</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
<span class="sd">"""Base class for all evaluation metrics.</span>
<span class="sd"> .. note::</span>
<span class="sd"> This is a base class that provides common metric interfaces.</span>
<span class="sd"> One should not use this class directly, but instead create new metric</span>
<span class="sd"> classes that extend it.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
<span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">name</span> <span class="o">=</span> <span class="nb">str</span><span class="p">(</span><span class="n">name</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">output_names</span> <span class="o">=</span> <span class="n">output_names</span>
<span class="bp">self</span><span class="o">.</span><span class="n">label_names</span> <span class="o">=</span> <span class="n">label_names</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_kwargs</span> <span class="o">=</span> <span class="n">kwargs</span>
<span class="bp">self</span><span class="o">.</span><span class="n">reset</span><span class="p">()</span>
<span class="k">def</span> <span class="nf">__str__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="k">return</span> <span class="s2">"EvalMetric: </span><span class="si">{}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="nb">dict</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">get_name_value</span><span class="p">()))</span>
<div class="viewcode-block" id="EvalMetric.get_config"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.EvalMetric.get_config">[docs]</a> <span class="k">def</span> <span class="nf">get_config</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">"""Save configurations of metric. Can be recreated</span>
<span class="sd"> from configs with metric.create(**config)</span>
<span class="sd"> """</span>
<span class="n">config</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_kwargs</span><span class="o">.</span><span class="n">copy</span><span class="p">()</span>
<span class="n">config</span><span class="o">.</span><span class="n">update</span><span class="p">({</span>
<span class="s1">'metric'</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__class__</span><span class="o">.</span><span class="vm">__name__</span><span class="p">,</span>
<span class="s1">'name'</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">name</span><span class="p">,</span>
<span class="s1">'output_names'</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">output_names</span><span class="p">,</span>
<span class="s1">'label_names'</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">label_names</span><span class="p">})</span>
<span class="k">return</span> <span class="n">config</span></div>
<div class="viewcode-block" id="EvalMetric.update_dict"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.EvalMetric.update_dict">[docs]</a> <span class="k">def</span> <span class="nf">update_dict</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">label</span><span class="p">,</span> <span class="n">pred</span><span class="p">):</span>
<span class="sd">"""Update the internal evaluation with named label and pred</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : OrderedDict of str -> NDArray</span>
<span class="sd"> name to array mapping for labels.</span>
<span class="sd"> preds : OrderedDict of str -> NDArray</span>
<span class="sd"> name to array mapping of predicted outputs.</span>
<span class="sd"> """</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">output_names</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">pred</span> <span class="o">=</span> <span class="p">[</span><span class="n">pred</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="k">for</span> <span class="n">name</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">output_names</span><span class="p">]</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">pred</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">pred</span><span class="o">.</span><span class="n">values</span><span class="p">())</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">label_names</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">label</span> <span class="o">=</span> <span class="p">[</span><span class="n">label</span><span class="p">[</span><span class="n">name</span><span class="p">]</span> <span class="k">for</span> <span class="n">name</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">label_names</span><span class="p">]</span>
<span class="k">else</span><span class="p">:</span>
<span class="n">label</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">label</span><span class="o">.</span><span class="n">values</span><span class="p">())</span>
<span class="bp">self</span><span class="o">.</span><span class="n">update</span><span class="p">(</span><span class="n">label</span><span class="p">,</span> <span class="n">pred</span><span class="p">)</span></div>
<div class="viewcode-block" id="EvalMetric.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.EvalMetric.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span></div>
<div class="viewcode-block" id="EvalMetric.reset"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.EvalMetric.reset">[docs]</a> <span class="k">def</span> <span class="nf">reset</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">"""Resets the internal evaluation result to initial state."""</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">=</span> <span class="mi">0</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">=</span> <span class="mf">0.0</span></div>
<div class="viewcode-block" id="EvalMetric.get"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.EvalMetric.get">[docs]</a> <span class="k">def</span> <span class="nf">get</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">"""Gets the current evaluation result.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> names : list of str</span>
<span class="sd"> Name of the metrics.</span>
<span class="sd"> values : list of float</span>
<span class="sd"> Value of the evaluations.</span>
<span class="sd"> """</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
<span class="k">return</span> <span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="nb">float</span><span class="p">(</span><span class="s1">'nan'</span><span class="p">))</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">/</span> <span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span><span class="p">)</span></div>
<div class="viewcode-block" id="EvalMetric.get_name_value"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.EvalMetric.get_name_value">[docs]</a> <span class="k">def</span> <span class="nf">get_name_value</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">"""Returns zipped name and value pairs.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> list of tuples</span>
<span class="sd"> A (name, value) tuple list.</span>
<span class="sd"> """</span>
<span class="n">name</span><span class="p">,</span> <span class="n">value</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get</span><span class="p">()</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="nb">list</span><span class="p">):</span>
<span class="n">name</span> <span class="o">=</span> <span class="p">[</span><span class="n">name</span><span class="p">]</span>
<span class="k">if</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="nb">list</span><span class="p">):</span>
<span class="n">value</span> <span class="o">=</span> <span class="p">[</span><span class="n">value</span><span class="p">]</span>
<span class="k">return</span> <span class="nb">list</span><span class="p">(</span><span class="nb">zip</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">value</span><span class="p">))</span></div></div>
<span class="c1"># pylint: disable=invalid-name</span>
<span class="n">register</span> <span class="o">=</span> <span class="n">registry</span><span class="o">.</span><span class="n">get_register_func</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">,</span> <span class="s1">'metric'</span><span class="p">)</span>
<span class="n">alias</span> <span class="o">=</span> <span class="n">registry</span><span class="o">.</span><span class="n">get_alias_func</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">,</span> <span class="s1">'metric'</span><span class="p">)</span>
<span class="n">_create</span> <span class="o">=</span> <span class="n">registry</span><span class="o">.</span><span class="n">get_create_func</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">,</span> <span class="s1">'metric'</span><span class="p">)</span>
<span class="c1"># pylint: enable=invalid-name</span>
<div class="viewcode-block" id="create"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.create">[docs]</a><span class="k">def</span> <span class="nf">create</span><span class="p">(</span><span class="n">metric</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
<span class="sd">"""Creates evaluation metric from metric names or instances of EvalMetric</span>
<span class="sd"> or a custom metric function.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> metric : str or callable</span>
<span class="sd"> Specifies the metric to create.</span>
<span class="sd"> This argument must be one of the below:</span>
<span class="sd"> - Name of a metric.</span>
<span class="sd"> - An instance of `EvalMetric`.</span>
<span class="sd"> - A list, each element of which is a metric or a metric name.</span>
<span class="sd"> - An evaluation function that computes custom metric for a given batch of</span>
<span class="sd"> labels and predictions.</span>
<span class="sd"> *args : list</span>
<span class="sd"> Additional arguments to metric constructor.</span>
<span class="sd"> Only used when metric is str.</span>
<span class="sd"> **kwargs : dict</span>
<span class="sd"> Additional arguments to metric constructor.</span>
<span class="sd"> Only used when metric is str</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> def custom_metric(label, pred):</span>
<span class="sd"> ... return np.mean(np.abs(label - pred))</span>
<span class="sd"> ...</span>
<span class="sd"> >>> metric1 = mx.metric.create('acc')</span>
<span class="sd"> >>> metric2 = mx.metric.create(custom_metric)</span>
<span class="sd"> >>> metric3 = mx.metric.create([metric1, metric2, 'rmse'])</span>
<span class="sd"> """</span>
<span class="k">if</span> <span class="n">callable</span><span class="p">(</span><span class="n">metric</span><span class="p">):</span>
<span class="k">return</span> <span class="n">CustomMetric</span><span class="p">(</span><span class="n">metric</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
<span class="k">elif</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">metric</span><span class="p">,</span> <span class="nb">list</span><span class="p">):</span>
<span class="n">composite_metric</span> <span class="o">=</span> <span class="n">CompositeEvalMetric</span><span class="p">()</span>
<span class="k">for</span> <span class="n">child_metric</span> <span class="ow">in</span> <span class="n">metric</span><span class="p">:</span>
<span class="n">composite_metric</span><span class="o">.</span><span class="n">add</span><span class="p">(</span><span class="n">create</span><span class="p">(</span><span class="n">child_metric</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">))</span>
<span class="k">return</span> <span class="n">composite_metric</span>
<span class="k">return</span> <span class="n">_create</span><span class="p">(</span><span class="n">metric</span><span class="p">,</span> <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
<span class="nd">@register</span>
<span class="nd">@alias</span><span class="p">(</span><span class="s1">'composite'</span><span class="p">)</span>
<div class="viewcode-block" id="CompositeEvalMetric"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.CompositeEvalMetric">[docs]</a><span class="k">class</span> <span class="nc">CompositeEvalMetric</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Manages multiple evaluation metrics.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> metrics : list of EvalMetric</span>
<span class="sd"> List of child metrics.</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])]</span>
<span class="sd"> >>> labels = [mx.nd.array([0, 1, 1])]</span>
<span class="sd"> >>> eval_metrics_1 = mx.metric.Accuracy()</span>
<span class="sd"> >>> eval_metrics_2 = mx.metric.F1()</span>
<span class="sd"> >>> eval_metrics = mx.metric.CompositeEvalMetric()</span>
<span class="sd"> >>> for child_metric in [eval_metrics_1, eval_metrics_2]:</span>
<span class="sd"> >>> eval_metrics.add(child_metric)</span>
<span class="sd"> >>> eval_metrics.update(labels = labels, preds = predicts)</span>
<span class="sd"> >>> print eval_metrics.get()</span>
<span class="sd"> (['accuracy', 'f1'], [0.6666666666666666, 0.8])</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">metrics</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'composite'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">CompositeEvalMetric</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<span class="k">if</span> <span class="n">metrics</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">metrics</span> <span class="o">=</span> <span class="p">[]</span>
<span class="bp">self</span><span class="o">.</span><span class="n">metrics</span> <span class="o">=</span> <span class="p">[</span><span class="n">create</span><span class="p">(</span><span class="n">i</span><span class="p">)</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="n">metrics</span><span class="p">]</span>
<div class="viewcode-block" id="CompositeEvalMetric.add"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.CompositeEvalMetric.add">[docs]</a> <span class="k">def</span> <span class="nf">add</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">metric</span><span class="p">):</span>
<span class="sd">"""Adds a child metric.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> metric</span>
<span class="sd"> A metric instance.</span>
<span class="sd"> """</span>
<span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">create</span><span class="p">(</span><span class="n">metric</span><span class="p">))</span></div>
<div class="viewcode-block" id="CompositeEvalMetric.get_metric"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.CompositeEvalMetric.get_metric">[docs]</a> <span class="k">def</span> <span class="nf">get_metric</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">index</span><span class="p">):</span>
<span class="sd">"""Returns a child metric.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> index : int</span>
<span class="sd"> Index of child metric in the list of metrics.</span>
<span class="sd"> """</span>
<span class="k">try</span><span class="p">:</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="p">[</span><span class="n">index</span><span class="p">]</span>
<span class="k">except</span> <span class="ne">IndexError</span><span class="p">:</span>
<span class="k">return</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">"Metric index </span><span class="si">{}</span><span class="s2"> is out of range 0 and </span><span class="si">{}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span>
<span class="n">index</span><span class="p">,</span> <span class="nb">len</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="p">)))</span></div>
<span class="k">def</span> <span class="nf">update_dict</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span> <span class="c1"># pylint: disable=arguments-differ</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">label_names</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">labels</span> <span class="o">=</span> <span class="n">OrderedDict</span><span class="p">([</span><span class="n">i</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="n">labels</span><span class="o">.</span><span class="n">items</span><span class="p">()</span>
<span class="k">if</span> <span class="n">i</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">label_names</span><span class="p">])</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">output_names</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">preds</span> <span class="o">=</span> <span class="n">OrderedDict</span><span class="p">([</span><span class="n">i</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="n">preds</span><span class="o">.</span><span class="n">items</span><span class="p">()</span>
<span class="k">if</span> <span class="n">i</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">output_names</span><span class="p">])</span>
<span class="k">for</span> <span class="n">metric</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="p">:</span>
<span class="n">metric</span><span class="o">.</span><span class="n">update_dict</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">)</span>
<div class="viewcode-block" id="CompositeEvalMetric.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.CompositeEvalMetric.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="k">for</span> <span class="n">metric</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="p">:</span>
<span class="n">metric</span><span class="o">.</span><span class="n">update</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">)</span></div>
<div class="viewcode-block" id="CompositeEvalMetric.reset"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.CompositeEvalMetric.reset">[docs]</a> <span class="k">def</span> <span class="nf">reset</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">"""Resets the internal evaluation result to initial state."""</span>
<span class="k">try</span><span class="p">:</span>
<span class="k">for</span> <span class="n">metric</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="p">:</span>
<span class="n">metric</span><span class="o">.</span><span class="n">reset</span><span class="p">()</span>
<span class="k">except</span> <span class="ne">AttributeError</span><span class="p">:</span>
<span class="k">pass</span></div>
<div class="viewcode-block" id="CompositeEvalMetric.get"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.CompositeEvalMetric.get">[docs]</a> <span class="k">def</span> <span class="nf">get</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">"""Returns the current evaluation result.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> names : list of str</span>
<span class="sd"> Name of the metrics.</span>
<span class="sd"> values : list of float</span>
<span class="sd"> Value of the evaluations.</span>
<span class="sd"> """</span>
<span class="n">names</span> <span class="o">=</span> <span class="p">[]</span>
<span class="n">values</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">metric</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="p">:</span>
<span class="n">name</span><span class="p">,</span> <span class="n">value</span> <span class="o">=</span> <span class="n">metric</span><span class="o">.</span><span class="n">get</span><span class="p">()</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">name</span><span class="p">,</span> <span class="n">string_types</span><span class="p">):</span>
<span class="n">name</span> <span class="o">=</span> <span class="p">[</span><span class="n">name</span><span class="p">]</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">value</span><span class="p">,</span> <span class="n">numeric_types</span><span class="p">):</span>
<span class="n">value</span> <span class="o">=</span> <span class="p">[</span><span class="n">value</span><span class="p">]</span>
<span class="n">names</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span><span class="n">name</span><span class="p">)</span>
<span class="n">values</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span><span class="n">value</span><span class="p">)</span>
<span class="k">return</span> <span class="p">(</span><span class="n">names</span><span class="p">,</span> <span class="n">values</span><span class="p">)</span></div>
<span class="k">def</span> <span class="nf">get_config</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="n">config</span> <span class="o">=</span> <span class="nb">super</span><span class="p">(</span><span class="n">CompositeEvalMetric</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="n">get_config</span><span class="p">()</span>
<span class="n">config</span><span class="o">.</span><span class="n">update</span><span class="p">({</span><span class="s1">'metrics'</span><span class="p">:</span> <span class="p">[</span><span class="n">i</span><span class="o">.</span><span class="n">get_config</span><span class="p">()</span> <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="p">]})</span>
<span class="k">return</span> <span class="n">config</span></div>
<span class="c1">########################</span>
<span class="c1"># CLASSIFICATION METRICS</span>
<span class="c1">########################</span>
<span class="nd">@register</span>
<span class="nd">@alias</span><span class="p">(</span><span class="s1">'acc'</span><span class="p">)</span>
<div class="viewcode-block" id="Accuracy"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.Accuracy">[docs]</a><span class="k">class</span> <span class="nc">Accuracy</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Computes accuracy classification score.</span>
<span class="sd"> The accuracy score is defined as</span>
<span class="sd"> .. math::</span>
<span class="sd"> \\text{accuracy}(y, \\hat{y}) = \\frac{1}{n} \\sum_{i=0}^{n-1}</span>
<span class="sd"> \\text{1}(\\hat{y_i} == y_i)</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> axis : int, default=1</span>
<span class="sd"> The axis that represents classes</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])]</span>
<span class="sd"> >>> labels = [mx.nd.array([0, 1, 1])]</span>
<span class="sd"> >>> acc = mx.metric.Accuracy()</span>
<span class="sd"> >>> acc.update(preds = predicts, labels = labels)</span>
<span class="sd"> >>> print acc.get()</span>
<span class="sd"> ('accuracy', 0.6666666666666666)</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">axis</span><span class="o">=</span><span class="mi">1</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'accuracy'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">Accuracy</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">axis</span><span class="o">=</span><span class="n">axis</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">axis</span> <span class="o">=</span> <span class="n">axis</span>
<div class="viewcode-block" id="Accuracy.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.Accuracy.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data with class indices as values, one per sample.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Prediction values for samples. Each prediction value can either be the class index,</span>
<span class="sd"> or a vector of likelihoods for all classes.</span>
<span class="sd"> """</span>
<span class="n">labels</span><span class="p">,</span> <span class="n">preds</span> <span class="o">=</span> <span class="n">check_label_shapes</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">,</span> <span class="kc">True</span><span class="p">)</span>
<span class="k">for</span> <span class="n">label</span><span class="p">,</span> <span class="n">pred_label</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="k">if</span> <span class="n">pred_label</span><span class="o">.</span><span class="n">shape</span> <span class="o">!=</span> <span class="n">label</span><span class="o">.</span><span class="n">shape</span><span class="p">:</span>
<span class="n">pred_label</span> <span class="o">=</span> <span class="n">ndarray</span><span class="o">.</span><span class="n">argmax</span><span class="p">(</span><span class="n">pred_label</span><span class="p">,</span> <span class="n">axis</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">axis</span><span class="p">)</span>
<span class="n">pred_label</span> <span class="o">=</span> <span class="n">pred_label</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="s1">'int32'</span><span class="p">)</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="s1">'int32'</span><span class="p">)</span>
<span class="c1"># flatten before checking shapes to avoid shape miss match</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">flat</span>
<span class="n">pred_label</span> <span class="o">=</span> <span class="n">pred_label</span><span class="o">.</span><span class="n">flat</span>
<span class="n">check_label_shapes</span><span class="p">(</span><span class="n">label</span><span class="p">,</span> <span class="n">pred_label</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="p">(</span><span class="n">pred_label</span> <span class="o">==</span> <span class="n">label</span><span class="p">)</span><span class="o">.</span><span class="n">sum</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="nb">len</span><span class="p">(</span><span class="n">pred_label</span><span class="p">)</span></div></div>
<span class="nd">@register</span>
<span class="nd">@alias</span><span class="p">(</span><span class="s1">'top_k_accuracy'</span><span class="p">,</span> <span class="s1">'top_k_acc'</span><span class="p">)</span>
<div class="viewcode-block" id="TopKAccuracy"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.TopKAccuracy">[docs]</a><span class="k">class</span> <span class="nc">TopKAccuracy</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Computes top k predictions accuracy.</span>
<span class="sd"> `TopKAccuracy` differs from Accuracy in that it considers the prediction</span>
<span class="sd"> to be ``True`` as long as the ground truth label is in the top K</span>
<span class="sd"> predicated labels.</span>
<span class="sd"> If `top_k` = ``1``, then `TopKAccuracy` is identical to `Accuracy`.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> top_k : int</span>
<span class="sd"> Whether targets are in top k predictions.</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> np.random.seed(999)</span>
<span class="sd"> >>> top_k = 3</span>
<span class="sd"> >>> labels = [mx.nd.array([2, 6, 9, 2, 3, 4, 7, 8, 9, 6])]</span>
<span class="sd"> >>> predicts = [mx.nd.array(np.random.rand(10, 10))]</span>
<span class="sd"> >>> acc = mx.metric.TopKAccuracy(top_k=top_k)</span>
<span class="sd"> >>> acc.update(labels, predicts)</span>
<span class="sd"> >>> print acc.get()</span>
<span class="sd"> ('top_k_accuracy', 0.3)</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">top_k</span><span class="o">=</span><span class="mi">1</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'top_k_accuracy'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">TopKAccuracy</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">top_k</span><span class="o">=</span><span class="n">top_k</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">top_k</span> <span class="o">=</span> <span class="n">top_k</span>
<span class="k">assert</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">top_k</span> <span class="o">></span> <span class="mi">1</span><span class="p">),</span> <span class="s1">'Please use Accuracy if top_k is no more than 1'</span>
<span class="bp">self</span><span class="o">.</span><span class="n">name</span> <span class="o">+=</span> <span class="s1">'_</span><span class="si">%d</span><span class="s1">'</span> <span class="o">%</span> <span class="bp">self</span><span class="o">.</span><span class="n">top_k</span>
<div class="viewcode-block" id="TopKAccuracy.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.TopKAccuracy.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="n">labels</span><span class="p">,</span> <span class="n">preds</span> <span class="o">=</span> <span class="n">check_label_shapes</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">,</span> <span class="kc">True</span><span class="p">)</span>
<span class="k">for</span> <span class="n">label</span><span class="p">,</span> <span class="n">pred_label</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="k">assert</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">pred_label</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span> <span class="o"><=</span> <span class="mi">2</span><span class="p">),</span> <span class="s1">'Predictions should be no more than 2 dims'</span>
<span class="n">pred_label</span> <span class="o">=</span> <span class="n">numpy</span><span class="o">.</span><span class="n">argsort</span><span class="p">(</span><span class="n">pred_label</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="s1">'float32'</span><span class="p">),</span> <span class="n">axis</span><span class="o">=</span><span class="mi">1</span><span class="p">)</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="s1">'int32'</span><span class="p">)</span>
<span class="n">check_label_shapes</span><span class="p">(</span><span class="n">label</span><span class="p">,</span> <span class="n">pred_label</span><span class="p">)</span>
<span class="n">num_samples</span> <span class="o">=</span> <span class="n">pred_label</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
<span class="n">num_dims</span> <span class="o">=</span> <span class="nb">len</span><span class="p">(</span><span class="n">pred_label</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span>
<span class="k">if</span> <span class="n">num_dims</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="p">(</span><span class="n">pred_label</span><span class="o">.</span><span class="n">flat</span> <span class="o">==</span> <span class="n">label</span><span class="o">.</span><span class="n">flat</span><span class="p">)</span><span class="o">.</span><span class="n">sum</span><span class="p">()</span>
<span class="k">elif</span> <span class="n">num_dims</span> <span class="o">==</span> <span class="mi">2</span><span class="p">:</span>
<span class="n">num_classes</span> <span class="o">=</span> <span class="n">pred_label</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">1</span><span class="p">]</span>
<span class="n">top_k</span> <span class="o">=</span> <span class="nb">min</span><span class="p">(</span><span class="n">num_classes</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">top_k</span><span class="p">)</span>
<span class="k">for</span> <span class="n">j</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="n">top_k</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="p">(</span><span class="n">pred_label</span><span class="p">[:,</span> <span class="n">num_classes</span> <span class="o">-</span> <span class="mi">1</span> <span class="o">-</span> <span class="n">j</span><span class="p">]</span><span class="o">.</span><span class="n">flat</span> <span class="o">==</span> <span class="n">label</span><span class="o">.</span><span class="n">flat</span><span class="p">)</span><span class="o">.</span><span class="n">sum</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="n">num_samples</span></div></div>
<span class="k">class</span> <span class="nc">_BinaryClassificationMetrics</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> Private container class for classification metric statistics. True/false positive and</span>
<span class="sd"> true/false negative counts are sufficient statistics for various classification metrics.</span>
<span class="sd"> This class provides the machinery to track those statistics across mini-batches of</span>
<span class="sd"> (label, prediction) pairs.</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">true_positives</span> <span class="o">=</span> <span class="mi">0</span>
<span class="bp">self</span><span class="o">.</span><span class="n">false_negatives</span> <span class="o">=</span> <span class="mi">0</span>
<span class="bp">self</span><span class="o">.</span><span class="n">false_positives</span> <span class="o">=</span> <span class="mi">0</span>
<span class="bp">self</span><span class="o">.</span><span class="n">true_negatives</span> <span class="o">=</span> <span class="mi">0</span>
<span class="k">def</span> <span class="nf">update_binary_stats</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">label</span><span class="p">,</span> <span class="n">pred</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> Update various binary classification counts for a single (label, pred)</span>
<span class="sd"> pair.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> label : `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> pred : `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="n">pred</span> <span class="o">=</span> <span class="n">pred</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="s1">'int32'</span><span class="p">)</span>
<span class="n">pred_label</span> <span class="o">=</span> <span class="n">numpy</span><span class="o">.</span><span class="n">argmax</span><span class="p">(</span><span class="n">pred</span><span class="p">,</span> <span class="n">axis</span><span class="o">=</span><span class="mi">1</span><span class="p">)</span>
<span class="n">check_label_shapes</span><span class="p">(</span><span class="n">label</span><span class="p">,</span> <span class="n">pred</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">numpy</span><span class="o">.</span><span class="n">unique</span><span class="p">(</span><span class="n">label</span><span class="p">))</span> <span class="o">></span> <span class="mi">2</span><span class="p">:</span>
<span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">"</span><span class="si">%s</span><span class="s2"> currently only supports binary classification."</span>
<span class="o">%</span> <span class="bp">self</span><span class="o">.</span><span class="vm">__class__</span><span class="o">.</span><span class="vm">__name__</span><span class="p">)</span>
<span class="n">pred_true</span> <span class="o">=</span> <span class="p">(</span><span class="n">pred_label</span> <span class="o">==</span> <span class="mi">1</span><span class="p">)</span>
<span class="n">pred_false</span> <span class="o">=</span> <span class="mi">1</span> <span class="o">-</span> <span class="n">pred_true</span>
<span class="n">label_true</span> <span class="o">=</span> <span class="p">(</span><span class="n">label</span> <span class="o">==</span> <span class="mi">1</span><span class="p">)</span>
<span class="n">label_false</span> <span class="o">=</span> <span class="mi">1</span> <span class="o">-</span> <span class="n">label_true</span>
<span class="bp">self</span><span class="o">.</span><span class="n">true_positives</span> <span class="o">+=</span> <span class="p">(</span><span class="n">pred_true</span> <span class="o">*</span> <span class="n">label_true</span><span class="p">)</span><span class="o">.</span><span class="n">sum</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">false_positives</span> <span class="o">+=</span> <span class="p">(</span><span class="n">pred_true</span> <span class="o">*</span> <span class="n">label_false</span><span class="p">)</span><span class="o">.</span><span class="n">sum</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">false_negatives</span> <span class="o">+=</span> <span class="p">(</span><span class="n">pred_false</span> <span class="o">*</span> <span class="n">label_true</span><span class="p">)</span><span class="o">.</span><span class="n">sum</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">true_negatives</span> <span class="o">+=</span> <span class="p">(</span><span class="n">pred_false</span> <span class="o">*</span> <span class="n">label_false</span><span class="p">)</span><span class="o">.</span><span class="n">sum</span><span class="p">()</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">precision</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">true_positives</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">false_positives</span> <span class="o">></span> <span class="mi">0</span><span class="p">:</span>
<span class="k">return</span> <span class="nb">float</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">true_positives</span><span class="p">)</span> <span class="o">/</span> <span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">true_positives</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">false_positives</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="mf">0.</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">recall</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">true_positives</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">false_negatives</span> <span class="o">></span> <span class="mi">0</span><span class="p">:</span>
<span class="k">return</span> <span class="nb">float</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">true_positives</span><span class="p">)</span> <span class="o">/</span> <span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">true_positives</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">false_negatives</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="mf">0.</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">fscore</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">precision</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">recall</span> <span class="o">></span> <span class="mi">0</span><span class="p">:</span>
<span class="k">return</span> <span class="mi">2</span> <span class="o">*</span> <span class="bp">self</span><span class="o">.</span><span class="n">precision</span> <span class="o">*</span> <span class="bp">self</span><span class="o">.</span><span class="n">recall</span> <span class="o">/</span> <span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">precision</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">recall</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
<span class="k">return</span> <span class="mf">0.</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">matthewscc</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> Calculate the Matthew's Correlation Coefficent</span>
<span class="sd"> """</span>
<span class="k">if</span> <span class="ow">not</span> <span class="bp">self</span><span class="o">.</span><span class="n">total_examples</span><span class="p">:</span>
<span class="k">return</span> <span class="mf">0.</span>
<span class="n">true_pos</span> <span class="o">=</span> <span class="nb">float</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">true_positives</span><span class="p">)</span>
<span class="n">false_pos</span> <span class="o">=</span> <span class="nb">float</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">false_positives</span><span class="p">)</span>
<span class="n">false_neg</span> <span class="o">=</span> <span class="nb">float</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">false_negatives</span><span class="p">)</span>
<span class="n">true_neg</span> <span class="o">=</span> <span class="nb">float</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">true_negatives</span><span class="p">)</span>
<span class="n">terms</span> <span class="o">=</span> <span class="p">[(</span><span class="n">true_pos</span> <span class="o">+</span> <span class="n">false_pos</span><span class="p">),</span>
<span class="p">(</span><span class="n">true_pos</span> <span class="o">+</span> <span class="n">false_neg</span><span class="p">),</span>
<span class="p">(</span><span class="n">true_neg</span> <span class="o">+</span> <span class="n">false_pos</span><span class="p">),</span>
<span class="p">(</span><span class="n">true_neg</span> <span class="o">+</span> <span class="n">false_neg</span><span class="p">)]</span>
<span class="n">denom</span> <span class="o">=</span> <span class="mf">1.</span>
<span class="k">for</span> <span class="n">t</span> <span class="ow">in</span> <span class="nb">filter</span><span class="p">(</span><span class="k">lambda</span> <span class="n">t</span><span class="p">:</span> <span class="n">t</span> <span class="o">!=</span> <span class="mf">0.</span><span class="p">,</span> <span class="n">terms</span><span class="p">):</span>
<span class="n">denom</span> <span class="o">*=</span> <span class="n">t</span>
<span class="k">return</span> <span class="p">((</span><span class="n">true_pos</span> <span class="o">*</span> <span class="n">true_neg</span><span class="p">)</span> <span class="o">-</span> <span class="p">(</span><span class="n">false_pos</span> <span class="o">*</span> <span class="n">false_neg</span><span class="p">))</span> <span class="o">/</span> <span class="n">math</span><span class="o">.</span><span class="n">sqrt</span><span class="p">(</span><span class="n">denom</span><span class="p">)</span>
<span class="nd">@property</span>
<span class="k">def</span> <span class="nf">total_examples</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">false_negatives</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">false_positives</span> <span class="o">+</span> \
<span class="bp">self</span><span class="o">.</span><span class="n">true_negatives</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">true_positives</span>
<span class="k">def</span> <span class="nf">reset_stats</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">false_positives</span> <span class="o">=</span> <span class="mi">0</span>
<span class="bp">self</span><span class="o">.</span><span class="n">false_negatives</span> <span class="o">=</span> <span class="mi">0</span>
<span class="bp">self</span><span class="o">.</span><span class="n">true_positives</span> <span class="o">=</span> <span class="mi">0</span>
<span class="bp">self</span><span class="o">.</span><span class="n">true_negatives</span> <span class="o">=</span> <span class="mi">0</span>
<span class="nd">@register</span>
<div class="viewcode-block" id="F1"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.F1">[docs]</a><span class="k">class</span> <span class="nc">F1</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Computes the F1 score of a binary classification problem.</span>
<span class="sd"> The F1 score is equivalent to harmonic mean of the precision and recall,</span>
<span class="sd"> where the best value is 1.0 and the worst value is 0.0. The formula for F1 score is::</span>
<span class="sd"> F1 = 2 * (precision * recall) / (precision + recall)</span>
<span class="sd"> The formula for precision and recall is::</span>
<span class="sd"> precision = true_positives / (true_positives + false_positives)</span>
<span class="sd"> recall = true_positives / (true_positives + false_negatives)</span>
<span class="sd"> .. note::</span>
<span class="sd"> This F1 score only supports binary classification.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> average : str, default 'macro'</span>
<span class="sd"> Strategy to be used for aggregating across mini-batches.</span>
<span class="sd"> "macro": average the F1 scores for each batch.</span>
<span class="sd"> "micro": compute a single F1 score across all batches.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> predicts = [mx.nd.array([[0.3, 0.7], [0., 1.], [0.4, 0.6]])]</span>
<span class="sd"> >>> labels = [mx.nd.array([0., 1., 1.])]</span>
<span class="sd"> >>> f1 = mx.metric.F1()</span>
<span class="sd"> >>> f1.update(preds = predicts, labels = labels)</span>
<span class="sd"> >>> print f1.get()</span>
<span class="sd"> ('f1', 0.8)</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'f1'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">average</span><span class="o">=</span><span class="s2">"macro"</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">average</span> <span class="o">=</span> <span class="n">average</span>
<span class="bp">self</span><span class="o">.</span><span class="n">metrics</span> <span class="o">=</span> <span class="n">_BinaryClassificationMetrics</span><span class="p">()</span>
<span class="n">EvalMetric</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="n">name</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<div class="viewcode-block" id="F1.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.F1.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="n">labels</span><span class="p">,</span> <span class="n">preds</span> <span class="o">=</span> <span class="n">check_label_shapes</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">,</span> <span class="kc">True</span><span class="p">)</span>
<span class="k">for</span> <span class="n">label</span><span class="p">,</span> <span class="n">pred</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="o">.</span><span class="n">update_binary_stats</span><span class="p">(</span><span class="n">label</span><span class="p">,</span> <span class="n">pred</span><span class="p">)</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">average</span> <span class="o">==</span> <span class="s2">"macro"</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="o">.</span><span class="n">fscore</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="mi">1</span>
<span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="o">.</span><span class="n">reset_stats</span><span class="p">()</span>
<span class="k">else</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="o">.</span><span class="n">fscore</span> <span class="o">*</span> <span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="o">.</span><span class="n">total_examples</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="o">.</span><span class="n">total_examples</span></div>
<div class="viewcode-block" id="F1.reset"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.F1.reset">[docs]</a> <span class="k">def</span> <span class="nf">reset</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">"""Resets the internal evaluation result to initial state."""</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">=</span> <span class="mf">0.</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">=</span> <span class="mf">0.</span>
<span class="bp">self</span><span class="o">.</span><span class="n">metrics</span><span class="o">.</span><span class="n">reset_stats</span><span class="p">()</span></div></div>
<span class="nd">@register</span>
<div class="viewcode-block" id="MCC"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.MCC">[docs]</a><span class="k">class</span> <span class="nc">MCC</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Computes the Matthews Correlation Coefficient of a binary classification problem.</span>
<span class="sd"> While slower to compute than F1 the MCC can give insight that F1 or Accuracy cannot.</span>
<span class="sd"> For instance, if the network always predicts the same result</span>
<span class="sd"> then the MCC will immeadiately show this. The MCC is also symetric with respect</span>
<span class="sd"> to positive and negative categorization, however, there needs to be both</span>
<span class="sd"> positive and negative examples in the labels or it will always return 0.</span>
<span class="sd"> MCC of 0 is uncorrelated, 1 is completely correlated, and -1 is negatively correlated.</span>
<span class="sd"> .. math::</span>
<span class="sd"> \\text{MCC} = \\frac{ TP \\times TN - FP \\times FN }</span>
<span class="sd"> {\\sqrt{ (TP + FP) ( TP + FN ) ( TN + FP ) ( TN + FN ) } }</span>
<span class="sd"> where 0 terms in the denominator are replaced by 1.</span>
<span class="sd"> .. note::</span>
<span class="sd"> This version of MCC only supports binary classification.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> average : str, default 'macro'</span>
<span class="sd"> Strategy to be used for aggregating across mini-batches.</span>
<span class="sd"> "macro": average the MCC for each batch.</span>
<span class="sd"> "micro": compute a single MCC across all batches.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> # In this example the network almost always predicts positive</span>
<span class="sd"> >>> false_positives = 1000</span>
<span class="sd"> >>> false_negatives = 1</span>
<span class="sd"> >>> true_positives = 10000</span>
<span class="sd"> >>> true_negatives = 1</span>
<span class="sd"> >>> predicts = [mx.nd.array(</span>
<span class="sd"> [[.3, .7]]*false_positives +</span>
<span class="sd"> [[.7, .3]]*true_negatives +</span>
<span class="sd"> [[.7, .3]]*false_negatives +</span>
<span class="sd"> [[.3, .7]]*true_positives</span>
<span class="sd"> )]</span>
<span class="sd"> >>> labels = [mx.nd.array(</span>
<span class="sd"> [0.]*(false_positives + true_negatives) +</span>
<span class="sd"> [1.]*(false_negatives + true_positives)</span>
<span class="sd"> )]</span>
<span class="sd"> >>> f1 = mx.metric.F1()</span>
<span class="sd"> >>> f1.update(preds = predicts, labels = labels)</span>
<span class="sd"> >>> mcc = mx.metric.MCC()</span>
<span class="sd"> >>> mcc.update(preds = predicts, labels = labels)</span>
<span class="sd"> >>> print f1.get()</span>
<span class="sd"> ('f1', 0.95233560306652054)</span>
<span class="sd"> >>> print mcc.get()</span>
<span class="sd"> ('mcc', 0.01917751877733392)</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'mcc'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">average</span><span class="o">=</span><span class="s2">"macro"</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_average</span> <span class="o">=</span> <span class="n">average</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_metrics</span> <span class="o">=</span> <span class="n">_BinaryClassificationMetrics</span><span class="p">()</span>
<span class="n">EvalMetric</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="n">name</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<div class="viewcode-block" id="MCC.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.MCC.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="n">labels</span><span class="p">,</span> <span class="n">preds</span> <span class="o">=</span> <span class="n">check_label_shapes</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">,</span> <span class="kc">True</span><span class="p">)</span>
<span class="k">for</span> <span class="n">label</span><span class="p">,</span> <span class="n">pred</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_metrics</span><span class="o">.</span><span class="n">update_binary_stats</span><span class="p">(</span><span class="n">label</span><span class="p">,</span> <span class="n">pred</span><span class="p">)</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">_average</span> <span class="o">==</span> <span class="s2">"macro"</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_metrics</span><span class="o">.</span><span class="n">matthewscc</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="mi">1</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_metrics</span><span class="o">.</span><span class="n">reset_stats</span><span class="p">()</span>
<span class="k">else</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_metrics</span><span class="o">.</span><span class="n">matthewscc</span> <span class="o">*</span> <span class="bp">self</span><span class="o">.</span><span class="n">_metrics</span><span class="o">.</span><span class="n">total_examples</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_metrics</span><span class="o">.</span><span class="n">total_examples</span></div>
<div class="viewcode-block" id="MCC.reset"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.MCC.reset">[docs]</a> <span class="k">def</span> <span class="nf">reset</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">"""Resets the internal evaluation result to initial state."""</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">=</span> <span class="mf">0.</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">=</span> <span class="mf">0.</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_metrics</span><span class="o">.</span><span class="n">reset_stats</span><span class="p">()</span></div></div>
<span class="nd">@register</span>
<div class="viewcode-block" id="Perplexity"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.Perplexity">[docs]</a><span class="k">class</span> <span class="nc">Perplexity</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Computes perplexity.</span>
<span class="sd"> Perplexity is a measurement of how well a probability distribution</span>
<span class="sd"> or model predicts a sample. A low perplexity indicates the model</span>
<span class="sd"> is good at predicting the sample.</span>
<span class="sd"> The perplexity of a model q is defined as</span>
<span class="sd"> .. math::</span>
<span class="sd"> b^{\\big(-\\frac{1}{N} \\sum_{i=1}^N \\log_b q(x_i) \\big)}</span>
<span class="sd"> = \\exp \\big(-\\frac{1}{N} \\sum_{i=1}^N \\log q(x_i)\\big)</span>
<span class="sd"> where we let `b = e`.</span>
<span class="sd"> :math:`q(x_i)` is the predicted value of its ground truth</span>
<span class="sd"> label on sample :math:`x_i`.</span>
<span class="sd"> For example, we have three samples :math:`x_1, x_2, x_3` and their labels</span>
<span class="sd"> are :math:`[0, 1, 1]`.</span>
<span class="sd"> Suppose our model predicts :math:`q(x_1) = p(y_1 = 0 | x_1) = 0.3`</span>
<span class="sd"> and :math:`q(x_2) = 1.0`,</span>
<span class="sd"> :math:`q(x_3) = 0.6`. The perplexity of model q is</span>
<span class="sd"> :math:`exp\\big(-(\\log 0.3 + \\log 1.0 + \\log 0.6) / 3\\big) = 1.77109762852`.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> ignore_label : int or None</span>
<span class="sd"> Index of invalid label to ignore when</span>
<span class="sd"> counting. By default, sets to -1.</span>
<span class="sd"> If set to `None`, it will include all entries.</span>
<span class="sd"> axis : int (default -1)</span>
<span class="sd"> The axis from prediction that was used to</span>
<span class="sd"> compute softmax. By default use the last</span>
<span class="sd"> axis.</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])]</span>
<span class="sd"> >>> labels = [mx.nd.array([0, 1, 1])]</span>
<span class="sd"> >>> perp = mx.metric.Perplexity(ignore_label=None)</span>
<span class="sd"> >>> perp.update(labels, predicts)</span>
<span class="sd"> >>> print perp.get()</span>
<span class="sd"> ('Perplexity', 1.7710976285155853)</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">ignore_label</span><span class="p">,</span> <span class="n">axis</span><span class="o">=-</span><span class="mi">1</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'perplexity'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">Perplexity</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">ignore_label</span><span class="o">=</span><span class="n">ignore_label</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">ignore_label</span> <span class="o">=</span> <span class="n">ignore_label</span>
<span class="bp">self</span><span class="o">.</span><span class="n">axis</span> <span class="o">=</span> <span class="n">axis</span>
<div class="viewcode-block" id="Perplexity.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.Perplexity.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="n">labels</span><span class="p">)</span> <span class="o">==</span> <span class="nb">len</span><span class="p">(</span><span class="n">preds</span><span class="p">)</span>
<span class="n">loss</span> <span class="o">=</span> <span class="mf">0.</span>
<span class="n">num</span> <span class="o">=</span> <span class="mi">0</span>
<span class="k">for</span> <span class="n">label</span><span class="p">,</span> <span class="n">pred</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="k">assert</span> <span class="n">label</span><span class="o">.</span><span class="n">size</span> <span class="o">==</span> <span class="n">pred</span><span class="o">.</span><span class="n">size</span><span class="o">/</span><span class="n">pred</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="o">-</span><span class="mi">1</span><span class="p">],</span> \
<span class="s2">"shape mismatch: </span><span class="si">%s</span><span class="s2"> vs. </span><span class="si">%s</span><span class="s2">"</span><span class="o">%</span><span class="p">(</span><span class="n">label</span><span class="o">.</span><span class="n">shape</span><span class="p">,</span> <span class="n">pred</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">as_in_context</span><span class="p">(</span><span class="n">pred</span><span class="o">.</span><span class="n">context</span><span class="p">)</span><span class="o">.</span><span class="n">reshape</span><span class="p">((</span><span class="n">label</span><span class="o">.</span><span class="n">size</span><span class="p">,))</span>
<span class="n">pred</span> <span class="o">=</span> <span class="n">ndarray</span><span class="o">.</span><span class="n">pick</span><span class="p">(</span><span class="n">pred</span><span class="p">,</span> <span class="n">label</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="n">dtype</span><span class="o">=</span><span class="s1">'int32'</span><span class="p">),</span> <span class="n">axis</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">axis</span><span class="p">)</span>
<span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">ignore_label</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">ignore</span> <span class="o">=</span> <span class="p">(</span><span class="n">label</span> <span class="o">==</span> <span class="bp">self</span><span class="o">.</span><span class="n">ignore_label</span><span class="p">)</span><span class="o">.</span><span class="n">astype</span><span class="p">(</span><span class="n">pred</span><span class="o">.</span><span class="n">dtype</span><span class="p">)</span>
<span class="n">num</span> <span class="o">-=</span> <span class="n">ndarray</span><span class="o">.</span><span class="n">sum</span><span class="p">(</span><span class="n">ignore</span><span class="p">)</span><span class="o">.</span><span class="n">asscalar</span><span class="p">()</span>
<span class="n">pred</span> <span class="o">=</span> <span class="n">pred</span><span class="o">*</span><span class="p">(</span><span class="mi">1</span><span class="o">-</span><span class="n">ignore</span><span class="p">)</span> <span class="o">+</span> <span class="n">ignore</span>
<span class="n">loss</span> <span class="o">-=</span> <span class="n">ndarray</span><span class="o">.</span><span class="n">sum</span><span class="p">(</span><span class="n">ndarray</span><span class="o">.</span><span class="n">log</span><span class="p">(</span><span class="n">ndarray</span><span class="o">.</span><span class="n">maximum</span><span class="p">(</span><span class="mf">1e-10</span><span class="p">,</span> <span class="n">pred</span><span class="p">)))</span><span class="o">.</span><span class="n">asscalar</span><span class="p">()</span>
<span class="n">num</span> <span class="o">+=</span> <span class="n">pred</span><span class="o">.</span><span class="n">size</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="n">loss</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="n">num</span></div>
<div class="viewcode-block" id="Perplexity.get"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.Perplexity.get">[docs]</a> <span class="k">def</span> <span class="nf">get</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="sd">"""Returns the current evaluation result.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> Tuple of (str, float)</span>
<span class="sd"> Representing name of the metric and evaluation result.</span>
<span class="sd"> """</span>
<span class="k">return</span> <span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">name</span><span class="p">,</span> <span class="n">math</span><span class="o">.</span><span class="n">exp</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span><span class="o">/</span><span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span><span class="p">))</span></div></div>
<span class="c1">####################</span>
<span class="c1"># REGRESSION METRICS</span>
<span class="c1">####################</span>
<span class="nd">@register</span>
<div class="viewcode-block" id="MAE"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.MAE">[docs]</a><span class="k">class</span> <span class="nc">MAE</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Computes Mean Absolute Error (MAE) loss.</span>
<span class="sd"> The mean absolute error is given by</span>
<span class="sd"> .. math::</span>
<span class="sd"> \\frac{\\sum_i^n |y_i - \\hat{y}_i|}{n}</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> predicts = [mx.nd.array(np.array([3, -0.5, 2, 7]).reshape(4,1))]</span>
<span class="sd"> >>> labels = [mx.nd.array(np.array([2.5, 0.0, 2, 8]).reshape(4,1))]</span>
<span class="sd"> >>> mean_absolute_error = mx.metric.MAE()</span>
<span class="sd"> >>> mean_absolute_error.update(labels = labels, preds = predicts)</span>
<span class="sd"> >>> print mean_absolute_error.get()</span>
<span class="sd"> ('mae', 0.5)</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'mae'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">MAE</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<div class="viewcode-block" id="MAE.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.MAE.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="n">labels</span><span class="p">,</span> <span class="n">preds</span> <span class="o">=</span> <span class="n">check_label_shapes</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">,</span> <span class="kc">True</span><span class="p">)</span>
<span class="k">for</span> <span class="n">label</span><span class="p">,</span> <span class="n">pred</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="n">pred</span> <span class="o">=</span> <span class="n">pred</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">label</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">label</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="mi">1</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">pred</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="n">pred</span> <span class="o">=</span> <span class="n">pred</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">pred</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="mi">1</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="n">numpy</span><span class="o">.</span><span class="n">abs</span><span class="p">(</span><span class="n">label</span> <span class="o">-</span> <span class="n">pred</span><span class="p">)</span><span class="o">.</span><span class="n">mean</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="mi">1</span> <span class="c1"># numpy.prod(label.shape)</span></div></div>
<span class="nd">@register</span>
<div class="viewcode-block" id="MSE"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.MSE">[docs]</a><span class="k">class</span> <span class="nc">MSE</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Computes Mean Squared Error (MSE) loss.</span>
<span class="sd"> The mean squared error is given by</span>
<span class="sd"> .. math::</span>
<span class="sd"> \\frac{\\sum_i^n (y_i - \\hat{y}_i)^2}{n}</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> predicts = [mx.nd.array(np.array([3, -0.5, 2, 7]).reshape(4,1))]</span>
<span class="sd"> >>> labels = [mx.nd.array(np.array([2.5, 0.0, 2, 8]).reshape(4,1))]</span>
<span class="sd"> >>> mean_squared_error = mx.metric.MSE()</span>
<span class="sd"> >>> mean_squared_error.update(labels = labels, preds = predicts)</span>
<span class="sd"> >>> print mean_squared_error.get()</span>
<span class="sd"> ('mse', 0.375)</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'mse'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">MSE</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<div class="viewcode-block" id="MSE.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.MSE.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="n">labels</span><span class="p">,</span> <span class="n">preds</span> <span class="o">=</span> <span class="n">check_label_shapes</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">,</span> <span class="kc">True</span><span class="p">)</span>
<span class="k">for</span> <span class="n">label</span><span class="p">,</span> <span class="n">pred</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="n">pred</span> <span class="o">=</span> <span class="n">pred</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">label</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">label</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="mi">1</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">pred</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="n">pred</span> <span class="o">=</span> <span class="n">pred</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">pred</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="mi">1</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="p">((</span><span class="n">label</span> <span class="o">-</span> <span class="n">pred</span><span class="p">)</span><span class="o">**</span><span class="mf">2.0</span><span class="p">)</span><span class="o">.</span><span class="n">mean</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="mi">1</span> <span class="c1"># numpy.prod(label.shape)</span></div></div>
<span class="nd">@register</span>
<div class="viewcode-block" id="RMSE"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.RMSE">[docs]</a><span class="k">class</span> <span class="nc">RMSE</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Computes Root Mean Squred Error (RMSE) loss.</span>
<span class="sd"> The root mean squared error is given by</span>
<span class="sd"> .. math::</span>
<span class="sd"> \\sqrt{\\frac{\\sum_i^n (y_i - \\hat{y}_i)^2}{n}}</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> predicts = [mx.nd.array(np.array([3, -0.5, 2, 7]).reshape(4,1))]</span>
<span class="sd"> >>> labels = [mx.nd.array(np.array([2.5, 0.0, 2, 8]).reshape(4,1))]</span>
<span class="sd"> >>> root_mean_squared_error = mx.metric.RMSE()</span>
<span class="sd"> >>> root_mean_squared_error.update(labels = labels, preds = predicts)</span>
<span class="sd"> >>> print root_mean_squared_error.get()</span>
<span class="sd"> ('rmse', 0.612372457981)</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'rmse'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">RMSE</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<div class="viewcode-block" id="RMSE.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.RMSE.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="n">labels</span><span class="p">,</span> <span class="n">preds</span> <span class="o">=</span> <span class="n">check_label_shapes</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">,</span> <span class="kc">True</span><span class="p">)</span>
<span class="k">for</span> <span class="n">label</span><span class="p">,</span> <span class="n">pred</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="n">pred</span> <span class="o">=</span> <span class="n">pred</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">label</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">label</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="mi">1</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">pred</span><span class="o">.</span><span class="n">shape</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
<span class="n">pred</span> <span class="o">=</span> <span class="n">pred</span><span class="o">.</span><span class="n">reshape</span><span class="p">(</span><span class="n">pred</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="mi">1</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="n">numpy</span><span class="o">.</span><span class="n">sqrt</span><span class="p">(((</span><span class="n">label</span> <span class="o">-</span> <span class="n">pred</span><span class="p">)</span><span class="o">**</span><span class="mf">2.0</span><span class="p">)</span><span class="o">.</span><span class="n">mean</span><span class="p">())</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="mi">1</span></div></div>
<span class="nd">@register</span>
<span class="nd">@alias</span><span class="p">(</span><span class="s1">'ce'</span><span class="p">)</span>
<div class="viewcode-block" id="CrossEntropy"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.CrossEntropy">[docs]</a><span class="k">class</span> <span class="nc">CrossEntropy</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Computes Cross Entropy loss.</span>
<span class="sd"> The cross entropy over a batch of sample size :math:`N` is given by</span>
<span class="sd"> .. math::</span>
<span class="sd"> -\\sum_{n=1}^{N}\\sum_{k=1}^{K}t_{nk}\\log (y_{nk}),</span>
<span class="sd"> where :math:`t_{nk}=1` if and only if sample :math:`n` belongs to class :math:`k`.</span>
<span class="sd"> :math:`y_{nk}` denotes the probability of sample :math:`n` belonging to</span>
<span class="sd"> class :math:`k`.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> eps : float</span>
<span class="sd"> Cross Entropy loss is undefined for predicted value is 0 or 1,</span>
<span class="sd"> so predicted values are added with the small constant.</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])]</span>
<span class="sd"> >>> labels = [mx.nd.array([0, 1, 1])]</span>
<span class="sd"> >>> ce = mx.metric.CrossEntropy()</span>
<span class="sd"> >>> ce.update(labels, predicts)</span>
<span class="sd"> >>> print ce.get()</span>
<span class="sd"> ('cross-entropy', 0.57159948348999023)</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">eps</span><span class="o">=</span><span class="mf">1e-12</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'cross-entropy'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">CrossEntropy</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">eps</span><span class="o">=</span><span class="n">eps</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">eps</span> <span class="o">=</span> <span class="n">eps</span>
<div class="viewcode-block" id="CrossEntropy.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.CrossEntropy.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="n">labels</span><span class="p">,</span> <span class="n">preds</span> <span class="o">=</span> <span class="n">check_label_shapes</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">,</span> <span class="kc">True</span><span class="p">)</span>
<span class="k">for</span> <span class="n">label</span><span class="p">,</span> <span class="n">pred</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="n">pred</span> <span class="o">=</span> <span class="n">pred</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">ravel</span><span class="p">()</span>
<span class="k">assert</span> <span class="n">label</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">==</span> <span class="n">pred</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
<span class="n">prob</span> <span class="o">=</span> <span class="n">pred</span><span class="p">[</span><span class="n">numpy</span><span class="o">.</span><span class="n">arange</span><span class="p">(</span><span class="n">label</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]),</span> <span class="n">numpy</span><span class="o">.</span><span class="n">int64</span><span class="p">(</span><span class="n">label</span><span class="p">)]</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="p">(</span><span class="o">-</span><span class="n">numpy</span><span class="o">.</span><span class="n">log</span><span class="p">(</span><span class="n">prob</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">eps</span><span class="p">))</span><span class="o">.</span><span class="n">sum</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="n">label</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span></div></div>
<span class="nd">@register</span>
<span class="nd">@alias</span><span class="p">(</span><span class="s1">'nll_loss'</span><span class="p">)</span>
<div class="viewcode-block" id="NegativeLogLikelihood"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.NegativeLogLikelihood">[docs]</a><span class="k">class</span> <span class="nc">NegativeLogLikelihood</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Computes the negative log-likelihood loss.</span>
<span class="sd"> The negative log-likelihoodd loss over a batch of sample size :math:`N` is given by</span>
<span class="sd"> .. math::</span>
<span class="sd"> -\\sum_{n=1}^{N}\\sum_{k=1}^{K}t_{nk}\\log (y_{nk}),</span>
<span class="sd"> where :math:`K` is the number of classes, :math:`y_{nk}` is the prediceted probability for</span>
<span class="sd"> :math:`k`-th class for :math:`n`-th sample. :math:`t_{nk}=1` if and only if sample</span>
<span class="sd"> :math:`n` belongs to class :math:`k`.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> eps : float</span>
<span class="sd"> Negative log-likelihood loss is undefined for predicted value is 0,</span>
<span class="sd"> so predicted values are added with the small constant.</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])]</span>
<span class="sd"> >>> labels = [mx.nd.array([0, 1, 1])]</span>
<span class="sd"> >>> nll_loss = mx.metric.NegativeLogLikelihood()</span>
<span class="sd"> >>> nll_loss.update(labels, predicts)</span>
<span class="sd"> >>> print nll_loss.get()</span>
<span class="sd"> ('nll-loss', 0.57159948348999023)</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">eps</span><span class="o">=</span><span class="mf">1e-12</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'nll-loss'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">NegativeLogLikelihood</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">eps</span><span class="o">=</span><span class="n">eps</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">eps</span> <span class="o">=</span> <span class="n">eps</span>
<div class="viewcode-block" id="NegativeLogLikelihood.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.NegativeLogLikelihood.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="n">labels</span><span class="p">,</span> <span class="n">preds</span> <span class="o">=</span> <span class="n">check_label_shapes</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">,</span> <span class="kc">True</span><span class="p">)</span>
<span class="k">for</span> <span class="n">label</span><span class="p">,</span> <span class="n">pred</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="n">pred</span> <span class="o">=</span> <span class="n">pred</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">ravel</span><span class="p">()</span>
<span class="n">num_examples</span> <span class="o">=</span> <span class="n">pred</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
<span class="k">assert</span> <span class="n">label</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="o">==</span> <span class="n">num_examples</span><span class="p">,</span> <span class="p">(</span><span class="n">label</span><span class="o">.</span><span class="n">shape</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">num_examples</span><span class="p">)</span>
<span class="n">prob</span> <span class="o">=</span> <span class="n">pred</span><span class="p">[</span><span class="n">numpy</span><span class="o">.</span><span class="n">arange</span><span class="p">(</span><span class="n">num_examples</span><span class="p">,</span> <span class="n">dtype</span><span class="o">=</span><span class="n">numpy</span><span class="o">.</span><span class="n">int64</span><span class="p">),</span> <span class="n">numpy</span><span class="o">.</span><span class="n">int64</span><span class="p">(</span><span class="n">label</span><span class="p">)]</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="p">(</span><span class="o">-</span><span class="n">numpy</span><span class="o">.</span><span class="n">log</span><span class="p">(</span><span class="n">prob</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">eps</span><span class="p">))</span><span class="o">.</span><span class="n">sum</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="n">num_examples</span></div></div>
<span class="nd">@register</span>
<span class="nd">@alias</span><span class="p">(</span><span class="s1">'pearsonr'</span><span class="p">)</span>
<div class="viewcode-block" id="PearsonCorrelation"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.PearsonCorrelation">[docs]</a><span class="k">class</span> <span class="nc">PearsonCorrelation</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Computes Pearson correlation.</span>
<span class="sd"> The pearson correlation is given by</span>
<span class="sd"> .. math::</span>
<span class="sd"> \\frac{cov(y, \\hat{y})}{\\sigma{y}\\sigma{\\hat{y}}}</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])]</span>
<span class="sd"> >>> labels = [mx.nd.array([[1, 0], [0, 1], [0, 1]])]</span>
<span class="sd"> >>> pr = mx.metric.PearsonCorrelation()</span>
<span class="sd"> >>> pr.update(labels, predicts)</span>
<span class="sd"> >>> print pr.get()</span>
<span class="sd"> ('pearson-correlation', 0.42163704544016178)</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'pearsonr'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">PearsonCorrelation</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<div class="viewcode-block" id="PearsonCorrelation.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.PearsonCorrelation.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="n">labels</span><span class="p">,</span> <span class="n">preds</span> <span class="o">=</span> <span class="n">check_label_shapes</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">,</span> <span class="kc">True</span><span class="p">)</span>
<span class="k">for</span> <span class="n">label</span><span class="p">,</span> <span class="n">pred</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="n">check_label_shapes</span><span class="p">(</span><span class="n">label</span><span class="p">,</span> <span class="n">pred</span><span class="p">,</span> <span class="kc">False</span><span class="p">,</span> <span class="kc">True</span><span class="p">)</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="n">pred</span> <span class="o">=</span> <span class="n">pred</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="n">numpy</span><span class="o">.</span><span class="n">corrcoef</span><span class="p">(</span><span class="n">pred</span><span class="o">.</span><span class="n">ravel</span><span class="p">(),</span> <span class="n">label</span><span class="o">.</span><span class="n">ravel</span><span class="p">())[</span><span class="mi">0</span><span class="p">,</span> <span class="mi">1</span><span class="p">]</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="mi">1</span></div></div>
<span class="nd">@register</span>
<div class="viewcode-block" id="Loss"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.Loss">[docs]</a><span class="k">class</span> <span class="nc">Loss</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Dummy metric for directly printing loss.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'loss'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">Loss</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">_</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">preds</span><span class="p">,</span> <span class="n">ndarray</span><span class="o">.</span><span class="n">ndarray</span><span class="o">.</span><span class="n">NDArray</span><span class="p">):</span>
<span class="n">preds</span> <span class="o">=</span> <span class="p">[</span><span class="n">preds</span><span class="p">]</span>
<span class="k">for</span> <span class="n">pred</span> <span class="ow">in</span> <span class="n">preds</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="n">ndarray</span><span class="o">.</span><span class="n">sum</span><span class="p">(</span><span class="n">pred</span><span class="p">)</span><span class="o">.</span><span class="n">asscalar</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="n">pred</span><span class="o">.</span><span class="n">size</span></div>
<span class="nd">@register</span>
<div class="viewcode-block" id="Torch"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.Torch">[docs]</a><span class="k">class</span> <span class="nc">Torch</span><span class="p">(</span><span class="n">Loss</span><span class="p">):</span>
<span class="sd">"""Dummy metric for torch criterions."""</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'torch'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">Torch</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span></div>
<span class="nd">@register</span>
<div class="viewcode-block" id="Caffe"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.Caffe">[docs]</a><span class="k">class</span> <span class="nc">Caffe</span><span class="p">(</span><span class="n">Loss</span><span class="p">):</span>
<span class="sd">"""Dummy metric for caffe criterions."""</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="s1">'caffe'</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="nb">super</span><span class="p">(</span><span class="n">Caffe</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span></div>
<span class="nd">@register</span>
<div class="viewcode-block" id="CustomMetric"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.CustomMetric">[docs]</a><span class="k">class</span> <span class="nc">CustomMetric</span><span class="p">(</span><span class="n">EvalMetric</span><span class="p">):</span>
<span class="sd">"""Computes a customized evaluation metric.</span>
<span class="sd"> The `feval` function can return a `tuple` of (sum_metric, num_inst) or return</span>
<span class="sd"> an `int` sum_metric.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> feval : callable(label, pred)</span>
<span class="sd"> Customized evaluation function.</span>
<span class="sd"> name : str, optional</span>
<span class="sd"> The name of the metric. (the default is None).</span>
<span class="sd"> allow_extra_outputs : bool, optional</span>
<span class="sd"> If true, the prediction outputs can have extra outputs.</span>
<span class="sd"> This is useful in RNN, where the states are also produced</span>
<span class="sd"> in outputs for forwarding. (the default is False).</span>
<span class="sd"> name : str</span>
<span class="sd"> Name of this metric instance for display.</span>
<span class="sd"> output_names : list of str, or None</span>
<span class="sd"> Name of predictions that should be used when updating with update_dict.</span>
<span class="sd"> By default include all predictions.</span>
<span class="sd"> label_names : list of str, or None</span>
<span class="sd"> Name of labels that should be used when updating with update_dict.</span>
<span class="sd"> By default include all labels.</span>
<span class="sd"> Examples</span>
<span class="sd"> --------</span>
<span class="sd"> >>> predicts = [mx.nd.array(np.array([3, -0.5, 2, 7]).reshape(4,1))]</span>
<span class="sd"> >>> labels = [mx.nd.array(np.array([2.5, 0.0, 2, 8]).reshape(4,1))]</span>
<span class="sd"> >>> feval = lambda x, y : (x + y).mean()</span>
<span class="sd"> >>> eval_metrics = mx.metric.CustomMetric(feval=feval)</span>
<span class="sd"> >>> eval_metrics.update(labels, predicts)</span>
<span class="sd"> >>> print eval_metrics.get()</span>
<span class="sd"> ('custom(<lambda>)', 6.0)</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">feval</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">allow_extra_outputs</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="k">if</span> <span class="n">name</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
<span class="n">name</span> <span class="o">=</span> <span class="n">feval</span><span class="o">.</span><span class="vm">__name__</span>
<span class="k">if</span> <span class="n">name</span><span class="o">.</span><span class="n">find</span><span class="p">(</span><span class="s1">'<'</span><span class="p">)</span> <span class="o">!=</span> <span class="o">-</span><span class="mi">1</span><span class="p">:</span>
<span class="n">name</span> <span class="o">=</span> <span class="s1">'custom(</span><span class="si">%s</span><span class="s1">)'</span> <span class="o">%</span> <span class="n">name</span>
<span class="nb">super</span><span class="p">(</span><span class="n">CustomMetric</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">name</span><span class="p">,</span> <span class="n">feval</span><span class="o">=</span><span class="n">feval</span><span class="p">,</span>
<span class="n">allow_extra_outputs</span><span class="o">=</span><span class="n">allow_extra_outputs</span><span class="p">,</span>
<span class="n">output_names</span><span class="o">=</span><span class="n">output_names</span><span class="p">,</span> <span class="n">label_names</span><span class="o">=</span><span class="n">label_names</span><span class="p">)</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_feval</span> <span class="o">=</span> <span class="n">feval</span>
<span class="bp">self</span><span class="o">.</span><span class="n">_allow_extra_outputs</span> <span class="o">=</span> <span class="n">allow_extra_outputs</span>
<div class="viewcode-block" id="CustomMetric.update"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.CustomMetric.update">[docs]</a> <span class="k">def</span> <span class="nf">update</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">):</span>
<span class="sd">"""Updates the internal evaluation result.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> labels : list of `NDArray`</span>
<span class="sd"> The labels of the data.</span>
<span class="sd"> preds : list of `NDArray`</span>
<span class="sd"> Predicted values.</span>
<span class="sd"> """</span>
<span class="k">if</span> <span class="ow">not</span> <span class="bp">self</span><span class="o">.</span><span class="n">_allow_extra_outputs</span><span class="p">:</span>
<span class="n">labels</span><span class="p">,</span> <span class="n">preds</span> <span class="o">=</span> <span class="n">check_label_shapes</span><span class="p">(</span><span class="n">labels</span><span class="p">,</span> <span class="n">preds</span><span class="p">,</span> <span class="kc">True</span><span class="p">)</span>
<span class="k">for</span> <span class="n">pred</span><span class="p">,</span> <span class="n">label</span> <span class="ow">in</span> <span class="nb">zip</span><span class="p">(</span><span class="n">preds</span><span class="p">,</span> <span class="n">labels</span><span class="p">):</span>
<span class="n">label</span> <span class="o">=</span> <span class="n">label</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="n">pred</span> <span class="o">=</span> <span class="n">pred</span><span class="o">.</span><span class="n">asnumpy</span><span class="p">()</span>
<span class="n">reval</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_feval</span><span class="p">(</span><span class="n">label</span><span class="p">,</span> <span class="n">pred</span><span class="p">)</span>
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">reval</span><span class="p">,</span> <span class="nb">tuple</span><span class="p">):</span>
<span class="p">(</span><span class="n">sum_metric</span><span class="p">,</span> <span class="n">num_inst</span><span class="p">)</span> <span class="o">=</span> <span class="n">reval</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="n">sum_metric</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="n">num_inst</span>
<span class="k">else</span><span class="p">:</span>
<span class="bp">self</span><span class="o">.</span><span class="n">sum_metric</span> <span class="o">+=</span> <span class="n">reval</span>
<span class="bp">self</span><span class="o">.</span><span class="n">num_inst</span> <span class="o">+=</span> <span class="mi">1</span></div>
<span class="k">def</span> <span class="nf">get_config</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">(</span><span class="s2">"CustomMetric cannot be serialized"</span><span class="p">)</span></div>
<span class="c1"># pylint: disable=invalid-name</span>
<div class="viewcode-block" id="np"><a class="viewcode-back" href="../../api/python/metric/metric.html#mxnet.metric.np">[docs]</a><span class="k">def</span> <span class="nf">np</span><span class="p">(</span><span class="n">numpy_feval</span><span class="p">,</span> <span class="n">name</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">allow_extra_outputs</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
<span class="sd">"""Creates a custom evaluation metric that receives its inputs as numpy arrays.</span>
<span class="sd"> Parameters</span>
<span class="sd"> ----------</span>
<span class="sd"> numpy_feval : callable(label, pred)</span>
<span class="sd"> Custom evaluation function that receives labels and predictions for a minibatch</span>
<span class="sd"> as numpy arrays and returns the corresponding custom metric as a floating point number.</span>
<span class="sd"> name : str, optional</span>
<span class="sd"> Name of the custom metric.</span>
<span class="sd"> allow_extra_outputs : bool, optional</span>
<span class="sd"> Whether prediction output is allowed to have extra outputs. This is useful in cases</span>
<span class="sd"> like RNN where states are also part of output which can then be fed back to the RNN</span>
<span class="sd"> in the next step. By default, extra outputs are not allowed.</span>
<span class="sd"> Returns</span>
<span class="sd"> -------</span>
<span class="sd"> float</span>
<span class="sd"> Custom metric corresponding to the provided labels and predictions.</span>
<span class="sd"> Example</span>
<span class="sd"> -------</span>
<span class="sd"> >>> def custom_metric(label, pred):</span>
<span class="sd"> ... return np.mean(np.abs(label-pred))</span>
<span class="sd"> ...</span>
<span class="sd"> >>> metric = mx.metric.np(custom_metric)</span>
<span class="sd"> """</span>
<span class="k">def</span> <span class="nf">feval</span><span class="p">(</span><span class="n">label</span><span class="p">,</span> <span class="n">pred</span><span class="p">):</span>
<span class="sd">"""Internal eval function."""</span>
<span class="k">return</span> <span class="n">numpy_feval</span><span class="p">(</span><span class="n">label</span><span class="p">,</span> <span class="n">pred</span><span class="p">)</span>
<span class="n">feval</span><span class="o">.</span><span class="vm">__name__</span> <span class="o">=</span> <span class="n">numpy_feval</span><span class="o">.</span><span class="vm">__name__</span>
<span class="k">return</span> <span class="n">CustomMetric</span><span class="p">(</span><span class="n">feval</span><span class="p">,</span> <span class="n">name</span><span class="p">,</span> <span class="n">allow_extra_outputs</span><span class="p">)</span></div>
<span class="c1"># pylint: enable=invalid-name</span>
</pre></div>
</div>
</div>
<div aria-label="main navigation" class="sphinxsidebar rightsidebar" role="navigation">
<div class="sphinxsidebarwrapper">
</div>
</div>
</div><div class="footer">
<div class="section-disclaimer">
<div class="container">
<div>
<img height="60" src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/image/apache_incubator_logo.png"/>
<p>
Apache MXNet is an effort undergoing incubation at The Apache Software Foundation (ASF), <strong>sponsored by the <i>Apache Incubator</i></strong>. Incubation is required of all newly accepted projects until a further review indicates that the infrastructure, communications, and decision making process have stabilized in a manner consistent with other successful ASF projects. While incubation status is not necessarily a reflection of the completeness or stability of the code, it does indicate that the project has yet to be fully endorsed by the ASF.
</p>
<p>
"Copyright © 2017-2018, The Apache Software Foundation
Apache MXNet, MXNet, Apache, the Apache feather, and the Apache MXNet project logo are either registered trademarks or trademarks of the Apache Software Foundation."
</p>
</div>
</div>
</div>
</div> <!-- pagename != index -->
</div>
</body>
</html>