blob: 317af1b5d5b0ca881f541ce1c50db9e76fb9bb41 [file] [log] [blame]
<!DOCTYPE html><html lang="en"><head><meta charSet="utf-8"/><meta http-equiv="X-UA-Compatible" content="IE=edge"/><title>Tensor · Apache SINGA</title><meta name="viewport" content="width=device-width"/><meta name="generator" content="Docusaurus"/><meta name="description" content="&lt;!--- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the &quot;License&quot;); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an &quot;AS IS&quot; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --&gt;"/><meta name="docsearch:version" content="4.0.0_Chinese"/><meta name="docsearch:language" content="en"/><meta property="og:title" content="Tensor · Apache SINGA"/><meta property="og:type" content="website"/><meta property="og:url" content="https://singa.apache.org/"/><meta property="og:description" content="&lt;!--- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the &quot;License&quot;); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an &quot;AS IS&quot; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --&gt;"/><meta property="og:image" content="https://singa.apache.org/img/singa_twitter_banner.jpeg"/><meta name="twitter:card" content="summary"/><meta name="twitter:image" content="https://singa.apache.org/img/singa_twitter_banner.jpeg"/><link rel="shortcut icon" href="/img/favicon.ico"/><link rel="stylesheet" href="https://cdn.jsdelivr.net/docsearch.js/1/docsearch.min.css"/><link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.12.0/styles/atom-one-dark.min.css"/><link rel="alternate" type="application/atom+xml" href="https://singa.apache.org/blog/atom.xml" title="Apache SINGA Blog ATOM Feed"/><link rel="alternate" type="application/rss+xml" href="https://singa.apache.org/blog/feed.xml" title="Apache SINGA Blog RSS Feed"/><link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Source+Sans+Pro:400,400i,700"/><link rel="stylesheet" href="https://fonts.googleapis.com/css2?family=Baloo+Paaji+2&amp;family=Source+Sans+Pro:wght@200;300&amp;display=swap"/><script type="text/javascript" src="https://buttons.github.io/buttons.js"></script><script src="https://unpkg.com/vanilla-back-to-top@7.1.14/dist/vanilla-back-to-top.min.js"></script><script>
document.addEventListener('DOMContentLoaded', function () {
  // Install the floating "back to top" button (vanilla-back-to-top)
  // once the DOM is ready; keep it above page content via z-index.
  var options = { "zIndex": 100 };
  addBackToTop(options);
});
</script><script src="/js/scrollSpy.js"></script><link rel="stylesheet" href="/css/main.css"/><script src="/js/codetabs.js"></script></head><body class="sideNavVisible separateOnPageNav"><div class="fixedHeaderContainer"><div class="headerWrapper wrapper"><header><a href="/"><img class="logo" src="/img/singa.png" alt="Apache SINGA"/></a><a href="/versions"><h3>4.0.0_Chinese</h3></a><div class="navigationWrapper navigationSlider"><nav class="slidingNav"><ul class="nav-site nav-site-internal"><li class="siteNavGroupActive"><a href="/docs/4.0.0_Chinese/installation" target="_self">Docs</a></li><li class=""><a href="/docs/4.0.0_Chinese/source-repository" target="_self">Community</a></li><li class=""><a href="/blog/" target="_self">News</a></li><li class=""><a href="https://apache-singa.readthedocs.io/en/latest/" target="_self">API</a></li><li class="navSearchWrapper reactNavSearchWrapper"><input type="text" id="search_input_react" placeholder="Search" title="Search"/></li><li class=""><a href="https://github.com/apache/singa" target="_self">GitHub</a></li></ul></nav></div></header></div></div><div class="navPusher"><div class="docMainWrapper wrapper"><div class="docsNavContainer" id="docsNav"><nav class="toc"><div class="toggleNav"><section class="navWrapper wrapper"><div class="navBreadcrumb wrapper"><div class="navToggle" id="navToggler"><div class="hamburger-menu"><div class="line1"></div><div class="line2"></div><div class="line3"></div></div></div><h2><i></i><span>Guides</span></h2><div class="tocToggler" id="tocToggler"><i class="icon-toc"></i></div></div><div class="navGroups"><div class="navGroup"><h3 class="navGroupCategoryTitle">Getting Started</h3><ul class=""><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/installation">Installation</a></li><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/software-stack">Software Stack</a></li><li class="navListItem"><a class="navItem" 
href="/docs/4.0.0_Chinese/examples">Examples</a></li></ul></div><div class="navGroup"><h3 class="navGroupCategoryTitle">Guides</h3><ul class=""><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/device">Device</a></li><li class="navListItem navListItemActive"><a class="navItem" href="/docs/4.0.0_Chinese/tensor">Tensor</a></li><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/autograd">Autograd</a></li><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/optimizer">Optimizer</a></li><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/graph">Model</a></li><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/onnx">ONNX</a></li><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/dist-train">Distributed Training</a></li><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/time-profiling">Time Profiling</a></li><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/half-precision">Half Precision</a></li></ul></div><div class="navGroup"><h3 class="navGroupCategoryTitle">Development</h3><ul class=""><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/downloads">Download SINGA</a></li><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/build">Build SINGA from Source</a></li><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/contribute-code">How to Contribute Code</a></li><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/contribute-docs">How to Contribute to Documentation</a></li><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/how-to-release">How to Prepare a Release</a></li><li class="navListItem"><a class="navItem" href="/docs/4.0.0_Chinese/git-workflow">Git Workflow</a></li></ul></div></div></section></div><script>
// Auto-expand the sidebar group that contains the currently-active page,
// then wire every collapsible group header to toggle its panel on click.
var coll = document.getElementsByClassName('collapsible');
var checkActiveCategory = true; // becomes false once the active group is found
Array.prototype.forEach.call(coll, function (header) {
  var links = header.nextElementSibling.getElementsByTagName('*');
  if (checkActiveCategory) {
    // Scan this group's panel for the active nav item; only the first
    // match across all groups is expanded.
    for (var j = 0; j < links.length; j++) {
      if (links[j].classList.contains('navListItemActive')) {
        header.nextElementSibling.classList.toggle('hide');
        header.childNodes[1].classList.toggle('rotate');
        checkActiveCategory = false;
        break;
      }
    }
  }
  header.addEventListener('click', function () {
    // `this` is the clicked header; rotate its arrow and show/hide the panel.
    var arrow = this.childNodes[1];
    arrow.classList.toggle('rotate');
    this.nextElementSibling.classList.toggle('hide');
  });
});
document.addEventListener('DOMContentLoaded', function() {
  // Wire up the mobile nav and TOC togglers once the DOM is ready.
  createToggler('#navToggler', '#docsNav', 'docsSliderActive');
  createToggler('#tocToggler', 'body', 'tocActive');

  // Clicking a link inside the on-page TOC should close the TOC overlay.
  var headings = document.querySelector('.toc-headings');
  headings && headings.addEventListener('click', function(event) {
    var el = event.target;
    // Walk up from the click target until we find the clicked anchor
    // or reach the TOC container itself (click on whitespace).
    while (el !== headings) {
      if (el.tagName === 'A') {
        document.body.classList.remove('tocActive');
        break;
      } else {
        el = el.parentNode;
      }
    }
  }, false);

  /**
   * Binds a click handler on the element matched by `togglerSelector`
   * that toggles `className` on the element matched by `targetSelector`.
   * No-op when either element is absent.
   *
   * Fix: the original guarded only against a missing toggler, so a
   * missing target threw a TypeError on every click.
   */
  function createToggler(togglerSelector, targetSelector, className) {
    var toggler = document.querySelector(togglerSelector);
    var target = document.querySelector(targetSelector);
    if (!toggler || !target) {
      return;
    }
    toggler.onclick = function(event) {
      event.preventDefault();
      target.classList.toggle(className);
    };
  }
});
</script></nav></div><div class="container mainContainer docsContainer"><div class="wrapper"><div class="post"><header class="postHeader"><a class="edit-page-link button" href="https://github.com/apache/singa-doc/blob/master/docs-site/docs/tensor.md" target="_blank" rel="noreferrer noopener">Edit</a><h1 id="__docusaurus" class="postHeaderTitle">Tensor</h1></header><article><div><span><!--- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -->
<p>每个Tensor实例都是分配在特定设备实例上的多维数组。Tensor实例可以存储变量,并在不同类型的硬件设备上提供线性代数运算,而无需用户察觉。需要注意的是,除了复制函数外,用户需要确保张量操作数分配在同一个设备上。</p>
<h2><a class="anchor" aria-hidden="true" id="tensor用法"></a><a href="#tensor用法" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>Tensor用法</h2>
<h3><a class="anchor" aria-hidden="true" id="创建tensor"></a><a href="#创建tensor" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>创建Tensor</h3>
<pre><code class="hljs css language-python"><span class="hljs-meta">&gt;&gt;&gt; </span><span class="hljs-keyword">import</span> numpy <span class="hljs-keyword">as</span> np
<span class="hljs-meta">&gt;&gt;&gt; </span><span class="hljs-keyword">from</span> singa <span class="hljs-keyword">import</span> tensor
<span class="hljs-meta">&gt;&gt;&gt; </span>tensor.from_numpy( np.asarray([[<span class="hljs-number">1</span>, <span class="hljs-number">0</span>, <span class="hljs-number">0</span>], [<span class="hljs-number">0</span>, <span class="hljs-number">1</span>, <span class="hljs-number">0</span>]], dtype=np.float32) )
[[<span class="hljs-number">1.</span> <span class="hljs-number">0.</span> <span class="hljs-number">0.</span>]
[<span class="hljs-number">0.</span> <span class="hljs-number">1.</span> <span class="hljs-number">0.</span>]]
</code></pre>
<h3><a class="anchor" aria-hidden="true" id="转换到numpy"></a><a href="#转换到numpy" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>转换到numpy</h3>
<pre><code class="hljs css language-python"><span class="hljs-meta">&gt;&gt;&gt; </span>a = np.asarray([[<span class="hljs-number">1</span>, <span class="hljs-number">0</span>, <span class="hljs-number">0</span>], [<span class="hljs-number">0</span>, <span class="hljs-number">1</span>, <span class="hljs-number">0</span>]], dtype=np.float32)
<span class="hljs-meta">&gt;&gt;&gt; </span>tensor.from_numpy(a)
[[<span class="hljs-number">1.</span> <span class="hljs-number">0.</span> <span class="hljs-number">0.</span>]
[<span class="hljs-number">0.</span> <span class="hljs-number">1.</span> <span class="hljs-number">0.</span>]]
<span class="hljs-meta">&gt;&gt;&gt; </span>tensor.to_numpy(tensor.from_numpy(a))
array([[<span class="hljs-number">1.</span>, <span class="hljs-number">0.</span>, <span class="hljs-number">0.</span>],
[<span class="hljs-number">0.</span>, <span class="hljs-number">1.</span>, <span class="hljs-number">0.</span>]], dtype=float32)
</code></pre>
<h3><a class="anchor" aria-hidden="true" id="tensor方法"></a><a href="#tensor方法" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>Tensor方法</h3>
<pre><code class="hljs css language-python"><span class="hljs-meta">&gt;&gt;&gt; </span>t = tensor.from_numpy(a)
<span class="hljs-meta">&gt;&gt;&gt; </span>t.transpose([<span class="hljs-number">1</span>,<span class="hljs-number">0</span>])
[[<span class="hljs-number">1.</span> <span class="hljs-number">0.</span>]
[<span class="hljs-number">0.</span> <span class="hljs-number">1.</span>]
[<span class="hljs-number">0.</span> <span class="hljs-number">0.</span>]]
</code></pre>
<p><code>Tensor</code>变换,最多支持6维。</p>
<pre><code class="hljs css language-python"><span class="hljs-meta">&gt;&gt;&gt; </span>a = tensor.random((<span class="hljs-number">2</span>,<span class="hljs-number">3</span>,<span class="hljs-number">4</span>,<span class="hljs-number">5</span>,<span class="hljs-number">6</span>,<span class="hljs-number">7</span>))
<span class="hljs-meta">&gt;&gt;&gt; </span>a.shape
(<span class="hljs-number">2</span>, <span class="hljs-number">3</span>, <span class="hljs-number">4</span>, <span class="hljs-number">5</span>, <span class="hljs-number">6</span>, <span class="hljs-number">7</span>)
<span class="hljs-meta">&gt;&gt;&gt; </span>a.reshape((<span class="hljs-number">2</span>,<span class="hljs-number">3</span>,<span class="hljs-number">4</span>,<span class="hljs-number">5</span>,<span class="hljs-number">7</span>,<span class="hljs-number">6</span>)).transpose((<span class="hljs-number">3</span>,<span class="hljs-number">2</span>,<span class="hljs-number">1</span>,<span class="hljs-number">0</span>,<span class="hljs-number">4</span>,<span class="hljs-number">5</span>)).shape
(<span class="hljs-number">5</span>, <span class="hljs-number">4</span>, <span class="hljs-number">3</span>, <span class="hljs-number">2</span>, <span class="hljs-number">7</span>, <span class="hljs-number">6</span>)
</code></pre>
<h3><a class="anchor" aria-hidden="true" id="tensor算术方法"></a><a href="#tensor算术方法" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>Tensor算术方法</h3>
<p><code>tensor</code>是实时计算的:</p>
<pre><code class="hljs css language-python"><span class="hljs-meta">&gt;&gt;&gt; </span>t + <span class="hljs-number">1</span>
[[<span class="hljs-number">2.</span> <span class="hljs-number">1.</span> <span class="hljs-number">1.</span>]
[<span class="hljs-number">1.</span> <span class="hljs-number">2.</span> <span class="hljs-number">1.</span>]]
<span class="hljs-meta">&gt;&gt;&gt; </span>t / <span class="hljs-number">5</span>
[[<span class="hljs-number">0.2</span> <span class="hljs-number">0.</span> <span class="hljs-number">0.</span> ]
[<span class="hljs-number">0.</span> <span class="hljs-number">0.2</span> <span class="hljs-number">0.</span> ]]
</code></pre>
<p><code>tensor</code> broadcasting运算:</p>
<pre><code class="hljs css language-python"><span class="hljs-meta">&gt;&gt;&gt; </span>a
[[<span class="hljs-number">1.</span> <span class="hljs-number">2.</span> <span class="hljs-number">3.</span>]
[<span class="hljs-number">4.</span> <span class="hljs-number">5.</span> <span class="hljs-number">6.</span>]]
<span class="hljs-meta">&gt;&gt;&gt; </span>b
[[<span class="hljs-number">1.</span> <span class="hljs-number">2.</span> <span class="hljs-number">3.</span>]]
<span class="hljs-meta">&gt;&gt;&gt; </span>a + b
[[<span class="hljs-number">2.</span> <span class="hljs-number">4.</span> <span class="hljs-number">6.</span>]
[<span class="hljs-number">5.</span> <span class="hljs-number">7.</span> <span class="hljs-number">9.</span>]]
<span class="hljs-meta">&gt;&gt;&gt; </span>a * b
[[ <span class="hljs-number">1.</span> <span class="hljs-number">4.</span> <span class="hljs-number">9.</span>]
[ <span class="hljs-number">4.</span> <span class="hljs-number">10.</span> <span class="hljs-number">18.</span>]]
<span class="hljs-meta">&gt;&gt;&gt; </span>a / b
[[<span class="hljs-number">1.</span> <span class="hljs-number">1.</span> <span class="hljs-number">1.</span> ]
[<span class="hljs-number">4.</span> <span class="hljs-number">2.5</span> <span class="hljs-number">2.</span> ]]
<span class="hljs-meta">&gt;&gt;&gt; </span>a/=b <span class="hljs-comment"># inplace operation</span>
<span class="hljs-meta">&gt;&gt;&gt; </span>a
[[<span class="hljs-number">1.</span> <span class="hljs-number">1.</span> <span class="hljs-number">1.</span> ]
[<span class="hljs-number">4.</span> <span class="hljs-number">2.5</span> <span class="hljs-number">2.</span> ]]
</code></pre>
<p><code>tensor</code> broadcasting矩阵乘法:</p>
<pre><code class="hljs css language-python"><span class="hljs-meta">&gt;&gt;&gt; </span><span class="hljs-keyword">from</span> singa <span class="hljs-keyword">import</span> tensor
<span class="hljs-meta">&gt;&gt;&gt; </span>a = tensor.random((<span class="hljs-number">2</span>,<span class="hljs-number">2</span>,<span class="hljs-number">2</span>,<span class="hljs-number">3</span>))
<span class="hljs-meta">&gt;&gt;&gt; </span>b = tensor.random((<span class="hljs-number">2</span>,<span class="hljs-number">3</span>,<span class="hljs-number">4</span>))
<span class="hljs-meta">&gt;&gt;&gt; </span>tensor.mult(a,b).shape
(<span class="hljs-number">2</span>, <span class="hljs-number">2</span>, <span class="hljs-number">2</span>, <span class="hljs-number">4</span>)
</code></pre>
<h3><a class="anchor" aria-hidden="true" id="tensor函数"></a><a href="#tensor函数" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>Tensor函数</h3>
<p><code>singa.tensor</code>模块中的函数在应用函数中定义的变换后返回新的<code>Tensor</code>对象。</p>
<pre><code class="hljs css language-python"><span class="hljs-meta">&gt;&gt;&gt; </span>tensor.log(t+<span class="hljs-number">1</span>)
[[<span class="hljs-number">0.6931472</span> <span class="hljs-number">0.</span> <span class="hljs-number">0.</span> ]
[<span class="hljs-number">0.</span> <span class="hljs-number">0.6931472</span> <span class="hljs-number">0.</span> ]]
</code></pre>
<h3><a class="anchor" aria-hidden="true" id="tensor在不同devices上"></a><a href="#tensor在不同devices上" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>Tensor在不同Devices上</h3>
<p><code>tensor</code>默认在主机(CPU)上创建;也可以通过指定设备在不同的硬件<code>device</code>上创建。<code>tensor</code>可以通过<code>to_device()</code>函数在<code>device</code>之间移动。</p>
<pre><code class="hljs css language-python"><span class="hljs-meta">&gt;&gt;&gt; </span><span class="hljs-keyword">from</span> singa <span class="hljs-keyword">import</span> device
<span class="hljs-meta">&gt;&gt;&gt; </span>x = tensor.Tensor((<span class="hljs-number">2</span>, <span class="hljs-number">3</span>), device.create_cuda_gpu())
<span class="hljs-meta">&gt;&gt;&gt; </span>x.gaussian(<span class="hljs-number">1</span>,<span class="hljs-number">1</span>)
<span class="hljs-meta">&gt;&gt;&gt; </span>x
[[<span class="hljs-number">1.531889</span> <span class="hljs-number">1.0128608</span> <span class="hljs-number">0.12691343</span>]
[<span class="hljs-number">2.1674204</span> <span class="hljs-number">3.083676</span> <span class="hljs-number">2.7421203</span> ]]
<span class="hljs-meta">&gt;&gt;&gt; </span><span class="hljs-comment"># move to host</span>
<span class="hljs-meta">&gt;&gt;&gt; </span>x.to_device(device.get_default_device())
</code></pre>
<h3><a class="anchor" aria-hidden="true" id="使用tensor训练mlp"></a><a href="#使用tensor训练mlp" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>使用Tensor训练MLP</h3>
<pre><code class="hljs css language-python">
<span class="hljs-string">"""
code snipet from examples/mlp/module.py
"""</span>
label = get_label()
data = get_data()
dev = device.create_cuda_gpu_on(<span class="hljs-number">0</span>)
sgd = opt.SGD(<span class="hljs-number">0.05</span>)
<span class="hljs-comment"># define tensor for input data and label</span>
tx = tensor.Tensor((<span class="hljs-number">400</span>, <span class="hljs-number">2</span>), dev, tensor.float32)
ty = tensor.Tensor((<span class="hljs-number">400</span>,), dev, tensor.int32)
model = MLP(data_size=<span class="hljs-number">2</span>, perceptron_size=<span class="hljs-number">3</span>, num_classes=<span class="hljs-number">2</span>)
<span class="hljs-comment"># attached model to graph</span>
model.set_optimizer(sgd)
model.compile([tx], is_train=<span class="hljs-literal">True</span>, use_graph=<span class="hljs-literal">True</span>, sequential=<span class="hljs-literal">False</span>)
model.train()
<span class="hljs-keyword">for</span> i <span class="hljs-keyword">in</span> range(<span class="hljs-number">1001</span>):
tx.copy_from_numpy(data)
ty.copy_from_numpy(label)
out, loss = model(tx, ty, <span class="hljs-string">'fp32'</span>, spars=<span class="hljs-literal">None</span>)
<span class="hljs-keyword">if</span> i % <span class="hljs-number">100</span> == <span class="hljs-number">0</span>:
print(<span class="hljs-string">"training loss = "</span>, tensor.to_numpy(loss)[<span class="hljs-number">0</span>])
</code></pre>
<p>输出:</p>
<pre><code class="hljs css language-bash">$ python3 examples/mlp/module.py
training loss = 0.6158037
training loss = 0.52852553
training loss = 0.4571422
training loss = 0.37274635
training loss = 0.30146334
training loss = 0.24906921
training loss = 0.21128304
training loss = 0.18390492
training loss = 0.16362564
training loss = 0.148164
training loss = 0.13589878
</code></pre>
<h2><a class="anchor" aria-hidden="true" id="tensor实现"></a><a href="#tensor实现" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>Tensor实现</h2>
<p>上一节介绍了<code>Tensor</code>的一般用法,下面将介绍其底层的实现。首先,将介绍Python和C++ tensors的设计。后面会讲到前端(Python)和后端(C++)如何连接,如何扩展。</p>
<h3><a class="anchor" aria-hidden="true" id="python-tensor"></a><a href="#python-tensor" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>Python Tensor</h3>
<p>Python类<code>Tensor</code>,定义在<code>python/singa/tensor.py</code>中,提供了高层的张量操作,用于实现深度学习操作(通过<a href="./autograd">autograd</a>),以及终端用户的数据管理。</p>
<p>它主要是通过简单地封装C++张量方法来工作,包括算术方法(如<code>sum</code>)和非算术方法(如<code>reshape</code>)。一些高级的算术运算以后会引入,并使用纯Python的张量API来实现,如<code>tensordot</code>。Python Tensor API可以利用灵活的方法轻松实现复杂的神经网络操作。</p>
<h3><a class="anchor" aria-hidden="true" id="c-tensor"></a><a href="#c-tensor" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>C++ Tensor</h3>
<p>C++类<code>Tensor</code>,定义在<code>include/singa/core/tensor.h</code>中,主要是管理存放数据的内存,并提供低级的API用于张量操作。同时,它还通过封装不同的后端(CUDA、BLAS、cuBLAS等)提供各种算术方法(如<code>matmul</code>)。</p>
<h4><a class="anchor" aria-hidden="true" id="执行背景和内存块"></a><a href="#执行背景和内存块" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>执行背景和内存块</h4>
<p>Tensor的两个重要概念或者说数据结构是执行背景<code>device</code>和内存块<code>Block</code>。</p>
<p>每个<code>Tensor</code>物理上存储在一个硬件设备上,并由硬件设备管理,代表执行背景(CPU、GPU),Tensor的数学计算是在设备上执行的。</p>
<p>Tensor数据在<code>Block</code>实例中,定义在<code>include/singa/core/common.h</code>中。<code>Block</code>拥有底层数据,而tensors则在描述tensor的元数据上拥有所有权,比如<code>shape</code>和<code>stride</code>。</p>
<h4><a class="anchor" aria-hidden="true" id="tensor数学后端"></a><a href="#tensor数学后端" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>Tensor数学后端</h4>
<p>为了利用不同后端硬件设备提供的高效数学库,SINGA为每个支持的后端设备提供了一套Tensor函数的实现。</p>
<ul>
<li>'tensor_math_cpp.h'为CppCPU设备使用Cpp(带CBLAS)实现操作。</li>
<li>'tensor_math_cuda.h'为CudaGPU设备实现了使用Cuda(带cuBLAS)的操作。</li>
<li>'tensor_math_opencl.h'为OpenclGPU设备实现了使用OpenCL的操作。</li>
</ul>
<h3><a class="anchor" aria-hidden="true" id="将c-apis暴露给python"></a><a href="#将c-apis暴露给python" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>将C++ APIs暴露给Python</h3>
<p>SWIG(<a href="http://www.swig.org/">http://www.swig.org/</a>)是一个可以自动将C++ API转换为Python API的工具。SINGA使用SWIG将C++ APIs公开到Python中。SWIG会生成几个文件,包括<code>python/singa/singa_wrap.py</code>。Python模块(如<code>tensor</code>、<code>device</code>和<code>autograd</code>)导入这个模块来调用C++ API来实现Python类和函数。</p>
<pre><code class="hljs css language-python"><span class="hljs-keyword">import</span> tensor
t = tensor.Tensor(shape=(<span class="hljs-number">2</span>, <span class="hljs-number">3</span>))
</code></pre>
<p>例如,当按上面的方法创建Python <code>Tensor</code>实例时,<code>Tensor</code>类的实现会创建一个在<code>singa_wrap.py</code>中定义的Tensor类的实例,它对应于C++ <code>Tensor</code>类。为了清楚起见,<code>singa_wrap.py</code>中的<code>Tensor</code>类在<code>tensor.py</code>中被称为<code>CTensor</code></p>
<pre><code class="hljs css language-python"><span class="hljs-comment"># in tensor.py</span>
<span class="hljs-keyword">from</span> . <span class="hljs-keyword">import</span> singa_wrap <span class="hljs-keyword">as</span> singa
CTensor = singa.Tensor
</code></pre>
<h3><a class="anchor" aria-hidden="true" id="创建新的tensor函数"></a><a href="#创建新的tensor函数" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>创建新的Tensor函数</h3>
<p>有了前面的描述所奠定的基础,扩展张量函数可以用自下而上的方式轻松完成。对于数学运算,其步骤是:</p>
<ul>
<li>在<code>tensor.h</code>中声明新的API。</li>
<li>使用 <code>tensor.cc</code> 中预定义的宏生成代码,参考 <code>GenUnaryTensorFn(Abs);</code></li>
<li>在<code>tensor_math.h</code>中声明template方法/函数。</li>
<li>至少在CPU(<code>tensor_math_cpp.h</code>)和GPU(<code>tensor_math_cuda.h</code>)中进行真正的实现。</li>
<li>将 API 加入到 <code>src/api/core_tensor.i</code> 中,通过 SWIG 公开 API。</li>
<li>通过调用 <code>singa_wrap.py</code> 中自动生成的函数,在 <code>tensor.py</code> 中定义 Python Tensor API。</li>
<li>在适当的地方编写单元测试。</li>
</ul>
<h2><a class="anchor" aria-hidden="true" id="python-api"></a><a href="#python-api" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>Python API</h2>
<p><em>进行中</em></p>
<h2><a class="anchor" aria-hidden="true" id="cpp-api"></a><a href="#cpp-api" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2.09V6.25c-1.09.53-2 1.84-2 3.25C6 11.31 7.55 13 9 13h4c1.45 0 3-1.69 3-3.5S14.5 6 13 6z"></path></svg></a>CPP API</h2>
<p><em>进行中</em></p>
</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/4.0.0_Chinese/device"><span class="arrow-prev"></span><span>Device</span></a><a class="docs-next button" href="/docs/4.0.0_Chinese/autograd"><span>Autograd</span><span class="arrow-next"></span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#tensor用法">Tensor用法</a><ul class="toc-headings"><li><a href="#创建tensor">创建Tensor</a></li><li><a href="#转换到numpy">转换到numpy</a></li><li><a href="#tensor方法">Tensor方法</a></li><li><a href="#tensor算术方法">Tensor算术方法</a></li><li><a href="#tensor函数">Tensor函数</a></li><li><a href="#tensor在不同devices上">Tensor在不同Devices上</a></li><li><a href="#使用tensor训练mlp">使用Tensor训练MLP</a></li></ul></li><li><a href="#tensor实现">Tensor实现</a><ul class="toc-headings"><li><a href="#python-tensor">Python Tensor</a></li><li><a href="#c-tensor">C++ Tensor</a></li><li><a href="#将c-apis暴露给python">将C++ APIs暴露给Python</a></li><li><a href="#创建新的tensor函数">创建新的Tensor函数</a></li></ul></li><li><a href="#python-api">Python API</a></li><li><a href="#cpp-api">CPP API</a></li></ul></nav></div><footer class="nav-footer" id="footer"><section class="sitemap"><a href="/" class="nav-home"><img src="/img/singa-logo-square.png" alt="Apache SINGA" width="66" height="58"/></a><div><h5>Docs</h5><a href="/docs/installation">Getting Started</a><a href="/docs/device">Guides</a><a href="/en/https://apache-singa.readthedocs.io/en/latest/">API Reference</a><a href="/docs/examples">Examples</a><a href="/docs/download-singa">Development</a></div><div><h5>Community</h5><a href="/en/users.html">User Showcase</a><a href="/docs/history-singa">SINGA History</a><a href="/docs/team-list">SINGA Team</a><a href="/blog">SINGA News</a><a href="https://github.com/apache/singa">GitHub</a><div class="social"><a class="github-button" href="https://github.com/apache/singa" data-count-href="/apache/singa/stargazers" data-show-count="true" data-count-aria-label="# stargazers on GitHub" 
aria-label="Star this project on GitHub">apache/singa-doc</a></div><div class="social"><a href="https://twitter.com/ApacheSINGA" class="twitter-follow-button">Follow @ApacheSINGA</a></div></div><div><h5>Apache Software Foundation</h5><a href="https://apache.org/" target="_blank" rel="noreferrer noopener">Foundation</a><a href="http://www.apache.org/licenses/" target="_blank" rel="noreferrer noopener">License</a><a href="http://www.apache.org/foundation/sponsorship.html" target="_blank" rel="noreferrer noopener">Sponsorship</a><a href="http://www.apache.org/foundation/thanks.html" target="_blank" rel="noreferrer noopener">Thanks</a><a href="http://www.apache.org/events/current-event" target="_blank" rel="noreferrer noopener">Events</a><a href="http://www.apache.org/security/" target="_blank" rel="noreferrer noopener">Security</a></div></section><div style="width:100%;text-align:center"><a href="https://apache.org/" target="_blank" rel="noreferrer noopener" class="ApacheOpenSource"><img src="/img/asf_logo_wide.svg" alt="Apache Open Source"/></a><section class="copyright" style="max-width:60%;margin:0 auto">Copyright © 2023
The Apache Software Foundation. All rights reserved.
Apache SINGA, Apache, the Apache feather logo, and
the Apache SINGA project logos are trademarks of The
Apache Software Foundation. All other marks mentioned
may be trademarks or registered trademarks of their
respective owners.</section></div></footer></div><script type="text/javascript" src="https://cdn.jsdelivr.net/docsearch.js/1/docsearch.min.js"></script><script>window.twttr=(function(d,s, id){var js,fjs=d.getElementsByTagName(s)[0],t=window.twttr||{};if(d.getElementById(id))return t;js=d.createElement(s);js.id=id;js.src='https://platform.twitter.com/widgets.js';fjs.parentNode.insertBefore(js, fjs);t._e = [];t.ready = function(f) {t._e.push(f);};return t;}(document, 'script', 'twitter-wjs'));</script><script>
// Focus the header search box when the user presses '/' anywhere on the page.
document.addEventListener('keyup', function (event) {
  // Ignore key presses that happen inside inputs or other focused widgets;
  // only react when focus is on the document body itself.
  if (event.target !== document.body) {
    return;
  }
  // 191 is the legacy keyCode for the '/' (slash) key.
  if (event.keyCode === 191) {
    var searchBox = document.getElementById('search_input_react');
    if (searchBox) {
      searchBox.focus();
    }
  }
});
</script><script>
// Initialize Algolia DocSearch on the header search input.
var search = docsearch({
// Search-only public API key (safe to ship in client-side code).
apiKey: '45202133606c0b5fa6d21cddc4725dd8',
indexName: 'apache_singa',
// CSS selector of the input element DocSearch attaches its dropdown to.
inputSelector: '#search_input_react',
// NOTE(review): filters request language:en / version:3.0.0, but this page's
// meta tags declare docsearch:version "4.0.0_Chinese" — confirm these facet
// values match what the 'apache_singa' index actually records.
algoliaOptions: {"facetFilters":["language:en","version:3.0.0"]}
});
</script></body></html>