<!-- HTML header for doxygen 1.8.9.1-->
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.6"/>
<meta name="robots" content="NOINDEX, NOFOLLOW" /> <!-- Prevent indexing by search engines -->
<title>Compute Library: AlexNetNetwork&lt; ITensorType, TensorType, SubTensorType, Accessor, ActivationLayerFunction, ConvolutionLayerFunction, DirectConvolutionLayerFunction, FullyConnectedLayerFunction, NormalizationLayerFunction, PoolingLayerFunction, SoftmaxLayerFunction &gt; Class Template Reference</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<script type="text/javascript" src="navtree.js"></script>
<script type="text/javascript">
$(document).ready(initResizable);
$(window).load(resizeHeight);
</script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/search.js"></script>
<script type="text/javascript">
$(document).ready(function() { searchBox.OnSelectItem(0); });
</script>
<script type="text/x-mathjax-config">
MathJax.Hub.Config({
extensions: ["tex2jax.js"],
jax: ["input/TeX","output/HTML-CSS"],
});
</script><script src="http://cdn.mathjax.org/mathjax/latest/MathJax.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td style="padding-left: 0.5em;">
<div id="projectname">Compute Library
&#160;<span id="projectnumber">17.10</span>
</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.6 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="index.xhtml"><span>Main&#160;Page</span></a></li>
<li><a href="pages.xhtml"><span>Related&#160;Pages</span></a></li>
<li><a href="namespaces.xhtml"><span>Namespaces</span></a></li>
<li class="current"><a href="annotated.xhtml"><span>Data&#160;Structures</span></a></li>
<li><a href="files.xhtml"><span>Files</span></a></li>
<li>
<div id="MSearchBox" class="MSearchBoxInactive">
<span class="left">
<img id="MSearchSelect" src="search/mag_sel.png"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
alt=""/>
<input type="text" id="MSearchField" value="Search" accesskey="S"
onfocus="searchBox.OnSearchFieldFocus(true)"
onblur="searchBox.OnSearchFieldFocus(false)"
onkeyup="searchBox.OnSearchFieldChange(event)"/>
</span><span class="right">
<a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="search/close.png" alt=""/></a>
</span>
</div>
</li>
</ul>
</div>
<div id="navrow2" class="tabs2">
<ul class="tablist">
<li><a href="annotated.xhtml"><span>Data&#160;Structures</span></a></li>
<li><a href="classes.xhtml"><span>Data&#160;Structure&#160;Index</span></a></li>
<li><a href="inherits.xhtml"><span>Class&#160;Hierarchy</span></a></li>
<li><a href="functions.xhtml"><span>Data&#160;Fields</span></a></li>
</ul>
</div>
</div><!-- top -->
<div id="side-nav" class="ui-resizable side-nav-resizable">
<div id="nav-tree">
<div id="nav-tree-contents">
<div id="nav-sync" class="sync"></div>
</div>
</div>
<div id="splitbar" style="-moz-user-select:none;"
class="ui-resizable-handle">
</div>
</div>
<script type="text/javascript">
$(document).ready(function(){initNavTree('classarm__compute_1_1test_1_1networks_1_1_alex_net_network.xhtml','');});
</script>
<div id="doc-content">
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
<a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(0)"><span class="SelectionMark">&#160;</span>All</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(1)"><span class="SelectionMark">&#160;</span>Data Structures</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(2)"><span class="SelectionMark">&#160;</span>Namespaces</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(3)"><span class="SelectionMark">&#160;</span>Files</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(4)"><span class="SelectionMark">&#160;</span>Functions</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(5)"><span class="SelectionMark">&#160;</span>Variables</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(6)"><span class="SelectionMark">&#160;</span>Typedefs</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(7)"><span class="SelectionMark">&#160;</span>Enumerations</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(8)"><span class="SelectionMark">&#160;</span>Enumerator</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(9)"><span class="SelectionMark">&#160;</span>Friends</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(10)"><span class="SelectionMark">&#160;</span>Macros</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(11)"><span class="SelectionMark">&#160;</span>Pages</a></div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div class="header">
<div class="summary">
<a href="#nested-classes">Data Structures</a> &#124;
<a href="#pub-methods">Public Member Functions</a> </div>
<div class="headertitle">
<div class="title">AlexNetNetwork&lt; ITensorType, TensorType, SubTensorType, Accessor, ActivationLayerFunction, ConvolutionLayerFunction, DirectConvolutionLayerFunction, FullyConnectedLayerFunction, NormalizationLayerFunction, PoolingLayerFunction, SoftmaxLayerFunction &gt; Class Template Reference</div> </div>
</div><!--header-->
<div class="contents">
<p>AlexNet model object.
<a href="classarm__compute_1_1test_1_1networks_1_1_alex_net_network.xhtml#details">More...</a></p>
<p><code>#include &lt;<a class="el" href="_alex_net_network_8h_source.xhtml">AlexNetNetwork.h</a>&gt;</code></p>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pub-methods"></a>
Public Member Functions</h2></td></tr>
<tr class="memitem:ab4d5b5821653f1eeabef922fbe3b9a91"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarm__compute_1_1test_1_1networks_1_1_alex_net_network.xhtml#ab4d5b5821653f1eeabef922fbe3b9a91">init</a> (<a class="el" href="namespacearm__compute.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6">DataType</a> data_type, int fixed_point_position, int batches, bool reshaped_weights=false)</td></tr>
<tr class="separator:ab4d5b5821653f1eeabef922fbe3b9a91"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a7740c7ab195c03ac140f1f75f633470f"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarm__compute_1_1test_1_1networks_1_1_alex_net_network.xhtml#a7740c7ab195c03ac140f1f75f633470f">build</a> ()</td></tr>
<tr class="separator:a7740c7ab195c03ac140f1f75f633470f"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:acaefe811b78a2fdc4a0dba0c4029c3ef"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarm__compute_1_1test_1_1networks_1_1_alex_net_network.xhtml#acaefe811b78a2fdc4a0dba0c4029c3ef">allocate</a> ()</td></tr>
<tr class="separator:acaefe811b78a2fdc4a0dba0c4029c3ef"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a3b778cda9ac3fad08e7217edbcb942e0"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarm__compute_1_1test_1_1networks_1_1_alex_net_network.xhtml#a3b778cda9ac3fad08e7217edbcb942e0">fill_random</a> ()</td></tr>
<tr class="memdesc:a3b778cda9ac3fad08e7217edbcb942e0"><td class="mdescLeft">&#160;</td><td class="mdescRight">Fills the trainable parameters and input with random data. <a href="#a3b778cda9ac3fad08e7217edbcb942e0">More...</a><br/></td></tr>
<tr class="separator:a3b778cda9ac3fad08e7217edbcb942e0"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:aab0a3920e581535eeb32febaf20dca50"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarm__compute_1_1test_1_1networks_1_1_alex_net_network.xhtml#aab0a3920e581535eeb32febaf20dca50">fill</a> (std::vector&lt; std::string &gt; weights, std::vector&lt; std::string &gt; biases)</td></tr>
<tr class="memdesc:aab0a3920e581535eeb32febaf20dca50"><td class="mdescLeft">&#160;</td><td class="mdescRight">Fills the trainable parameters from binary files. <a href="#aab0a3920e581535eeb32febaf20dca50">More...</a><br/></td></tr>
<tr class="separator:aab0a3920e581535eeb32febaf20dca50"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a3a41262ce9aed70a248ecefae646013b"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarm__compute_1_1test_1_1networks_1_1_alex_net_network.xhtml#a3a41262ce9aed70a248ecefae646013b">feed</a> (std::string name)</td></tr>
<tr class="memdesc:a3a41262ce9aed70a248ecefae646013b"><td class="mdescLeft">&#160;</td><td class="mdescRight">Feed input to network from file. <a href="#a3a41262ce9aed70a248ecefae646013b">More...</a><br/></td></tr>
<tr class="separator:a3a41262ce9aed70a248ecefae646013b"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a1466ef70729f3c8b5da5ebfec3f53f26"><td class="memItemLeft" align="right" valign="top">std::vector&lt; unsigned int &gt;&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarm__compute_1_1test_1_1networks_1_1_alex_net_network.xhtml#a1466ef70729f3c8b5da5ebfec3f53f26">get_classifications</a> ()</td></tr>
<tr class="memdesc:a1466ef70729f3c8b5da5ebfec3f53f26"><td class="mdescLeft">&#160;</td><td class="mdescRight">Get the classification results. <a href="#a1466ef70729f3c8b5da5ebfec3f53f26">More...</a><br/></td></tr>
<tr class="separator:a1466ef70729f3c8b5da5ebfec3f53f26"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:ac8bb3912a3ce86b15842e79d0b421204"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarm__compute_1_1test_1_1networks_1_1_alex_net_network.xhtml#ac8bb3912a3ce86b15842e79d0b421204">clear</a> ()</td></tr>
<tr class="memdesc:ac8bb3912a3ce86b15842e79d0b421204"><td class="mdescLeft">&#160;</td><td class="mdescRight">Clear all allocated memory from the tensor objects. <a href="#ac8bb3912a3ce86b15842e79d0b421204">More...</a><br/></td></tr>
<tr class="separator:ac8bb3912a3ce86b15842e79d0b421204"><td class="memSeparator" colspan="2">&#160;</td></tr>
<tr class="memitem:a13a43e6d814de94978c515cb084873b1"><td class="memItemLeft" align="right" valign="top">void&#160;</td><td class="memItemRight" valign="bottom"><a class="el" href="classarm__compute_1_1test_1_1networks_1_1_alex_net_network.xhtml#a13a43e6d814de94978c515cb084873b1">run</a> ()</td></tr>
<tr class="memdesc:a13a43e6d814de94978c515cb084873b1"><td class="mdescLeft">&#160;</td><td class="mdescRight">Runs the model. <a href="#a13a43e6d814de94978c515cb084873b1">More...</a><br/></td></tr>
<tr class="separator:a13a43e6d814de94978c515cb084873b1"><td class="memSeparator" colspan="2">&#160;</td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><h3>template&lt;typename ITensorType, typename TensorType, typename SubTensorType, typename Accessor, typename ActivationLayerFunction, typename ConvolutionLayerFunction, typename DirectConvolutionLayerFunction, typename FullyConnectedLayerFunction, typename NormalizationLayerFunction, typename PoolingLayerFunction, typename SoftmaxLayerFunction&gt;<br/>
class arm_compute::test::networks::AlexNetNetwork&lt; ITensorType, TensorType, SubTensorType, Accessor, ActivationLayerFunction, ConvolutionLayerFunction, DirectConvolutionLayerFunction, FullyConnectedLayerFunction, NormalizationLayerFunction, PoolingLayerFunction, SoftmaxLayerFunction &gt;</h3>
<p>AlexNet model object. </p>
<p>Definition at line <a class="el" href="_alex_net_network_8h_source.xhtml#l00054">54</a> of file <a class="el" href="_alex_net_network_8h_source.xhtml">AlexNetNetwork.h</a>.</p>
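<p>A minimal usage sketch of the lifecycle documented on this page (init, build, allocate, fill data, run, read back, clear). The concrete template instantiation and type aliases below are assumptions made only to keep the sketch concrete; the test suites instantiate the class with the backend-specific tensor and layer-function types (for example the NEON or CL variants). The sketch also assumes the test framework's global <code>library</code> (AssetsLibrary) has been set up, since <code>fill_random()</code>, <code>fill()</code> and <code>feed()</code> read data through it.</p>
<pre class="fragment">// Hypothetical NEON instantiation; names shown only to make the sketch concrete.
using NEAlexNet = arm_compute::test::networks::AlexNetNetwork&lt;
    arm_compute::ITensor, arm_compute::Tensor, arm_compute::SubTensor,
    arm_compute::test::Accessor,
    arm_compute::NEActivationLayer, arm_compute::NEConvolutionLayer,
    arm_compute::NEDirectConvolutionLayer, arm_compute::NEFullyConnectedLayer,
    arm_compute::NENormalizationLayer, arm_compute::NEPoolingLayer,
    arm_compute::NESoftmaxLayer&gt;;

NEAlexNet network{};
network.init(arm_compute::DataType::F32, 0 /* fixed_point_position */, 1 /* batches */);
network.build();     // configure layers and tensor shapes
network.allocate();  // back the configured tensors with memory
network.fill_random();                                    // or fill()/feed() with real data
network.run();
std::vector&lt;unsigned int&gt; top = network.get_classifications();
network.clear();     // free all tensor allocations
</pre>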
</div><h2 class="groupheader">Member Function Documentation</h2>
<a class="anchor" id="acaefe811b78a2fdc4a0dba0c4029c3ef"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">void allocate </td>
<td>(</td>
<td class="paramname"></td><td>)</td>
<td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Definition at line <a class="el" href="_alex_net_network_8h_source.xhtml#l00273">273</a> of file <a class="el" href="_alex_net_network_8h_source.xhtml">AlexNetNetwork.h</a>.</p>
<div class="fragment"><div class="line"><a name="l00274"></a><span class="lineno"> 274</span>&#160; {</div>
<div class="line"><a name="l00275"></a><span class="lineno"> 275</span>&#160; input.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00276"></a><span class="lineno"> 276</span>&#160; output.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00277"></a><span class="lineno"> 277</span>&#160;</div>
<div class="line"><a name="l00278"></a><span class="lineno"> 278</span>&#160; <span class="keywordflow">if</span>(!_reshaped_weights)</div>
<div class="line"><a name="l00279"></a><span class="lineno"> 279</span>&#160; {</div>
<div class="line"><a name="l00280"></a><span class="lineno"> 280</span>&#160; <span class="keywordflow">for</span>(<span class="keyword">auto</span> &amp;wi : w)</div>
<div class="line"><a name="l00281"></a><span class="lineno"> 281</span>&#160; {</div>
<div class="line"><a name="l00282"></a><span class="lineno"> 282</span>&#160; wi.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00283"></a><span class="lineno"> 283</span>&#160; }</div>
<div class="line"><a name="l00284"></a><span class="lineno"> 284</span>&#160;</div>
<div class="line"><a name="l00285"></a><span class="lineno"> 285</span>&#160; <span class="keywordflow">for</span>(<span class="keyword">auto</span> &amp;bi : b)</div>
<div class="line"><a name="l00286"></a><span class="lineno"> 286</span>&#160; {</div>
<div class="line"><a name="l00287"></a><span class="lineno"> 287</span>&#160; bi.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00288"></a><span class="lineno"> 288</span>&#160; }</div>
<div class="line"><a name="l00289"></a><span class="lineno"> 289</span>&#160; }</div>
<div class="line"><a name="l00290"></a><span class="lineno"> 290</span>&#160; <span class="keywordflow">else</span></div>
<div class="line"><a name="l00291"></a><span class="lineno"> 291</span>&#160; {</div>
<div class="line"><a name="l00292"></a><span class="lineno"> 292</span>&#160; w[0].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00293"></a><span class="lineno"> 293</span>&#160; w[2].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00294"></a><span class="lineno"> 294</span>&#160; w[5].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00295"></a><span class="lineno"> 295</span>&#160; w[6].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00296"></a><span class="lineno"> 296</span>&#160; w[7].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00297"></a><span class="lineno"> 297</span>&#160;</div>
<div class="line"><a name="l00298"></a><span class="lineno"> 298</span>&#160; b[5].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00299"></a><span class="lineno"> 299</span>&#160; b[6].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00300"></a><span class="lineno"> 300</span>&#160; b[7].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00301"></a><span class="lineno"> 301</span>&#160;</div>
<div class="line"><a name="l00302"></a><span class="lineno"> 302</span>&#160; <span class="keywordflow">if</span>(!_is_direct_conv)</div>
<div class="line"><a name="l00303"></a><span class="lineno"> 303</span>&#160; {</div>
<div class="line"><a name="l00304"></a><span class="lineno"> 304</span>&#160; <span class="keyword">dynamic_cast&lt;</span>TensorType *<span class="keyword">&gt;</span>(w11.get())-&gt;allocator()-&gt;allocate();</div>
<div class="line"><a name="l00305"></a><span class="lineno"> 305</span>&#160; <span class="keyword">dynamic_cast&lt;</span>TensorType *<span class="keyword">&gt;</span>(w12.get())-&gt;allocator()-&gt;allocate();</div>
<div class="line"><a name="l00306"></a><span class="lineno"> 306</span>&#160; <span class="keyword">dynamic_cast&lt;</span>TensorType *<span class="keyword">&gt;</span>(w31.get())-&gt;allocator()-&gt;allocate();</div>
<div class="line"><a name="l00307"></a><span class="lineno"> 307</span>&#160; <span class="keyword">dynamic_cast&lt;</span>TensorType *<span class="keyword">&gt;</span>(w32.get())-&gt;allocator()-&gt;allocate();</div>
<div class="line"><a name="l00308"></a><span class="lineno"> 308</span>&#160; <span class="keyword">dynamic_cast&lt;</span>TensorType *<span class="keyword">&gt;</span>(w41.get())-&gt;allocator()-&gt;allocate();</div>
<div class="line"><a name="l00309"></a><span class="lineno"> 309</span>&#160; <span class="keyword">dynamic_cast&lt;</span>TensorType *<span class="keyword">&gt;</span>(w42.get())-&gt;allocator()-&gt;allocate();</div>
<div class="line"><a name="l00310"></a><span class="lineno"> 310</span>&#160; }</div>
<div class="line"><a name="l00311"></a><span class="lineno"> 311</span>&#160; <span class="keywordflow">else</span></div>
<div class="line"><a name="l00312"></a><span class="lineno"> 312</span>&#160; {</div>
<div class="line"><a name="l00313"></a><span class="lineno"> 313</span>&#160; b[1].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00314"></a><span class="lineno"> 314</span>&#160; b[2].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00315"></a><span class="lineno"> 315</span>&#160; b[3].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00316"></a><span class="lineno"> 316</span>&#160; b[4].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00317"></a><span class="lineno"> 317</span>&#160; w[1].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00318"></a><span class="lineno"> 318</span>&#160; w[3].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00319"></a><span class="lineno"> 319</span>&#160; w[4].allocator()-&gt;allocate();</div>
<div class="line"><a name="l00320"></a><span class="lineno"> 320</span>&#160; }</div>
<div class="line"><a name="l00321"></a><span class="lineno"> 321</span>&#160; }</div>
<div class="line"><a name="l00322"></a><span class="lineno"> 322</span>&#160;</div>
<div class="line"><a name="l00323"></a><span class="lineno"> 323</span>&#160; conv1_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00324"></a><span class="lineno"> 324</span>&#160; act1_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00325"></a><span class="lineno"> 325</span>&#160; norm1_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00326"></a><span class="lineno"> 326</span>&#160; pool1_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00327"></a><span class="lineno"> 327</span>&#160; conv2_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00328"></a><span class="lineno"> 328</span>&#160; act2_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00329"></a><span class="lineno"> 329</span>&#160; norm2_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00330"></a><span class="lineno"> 330</span>&#160; pool2_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00331"></a><span class="lineno"> 331</span>&#160; conv3_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00332"></a><span class="lineno"> 332</span>&#160; act3_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00333"></a><span class="lineno"> 333</span>&#160; conv4_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00334"></a><span class="lineno"> 334</span>&#160; act4_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00335"></a><span class="lineno"> 335</span>&#160; conv5_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00336"></a><span class="lineno"> 336</span>&#160; act5_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00337"></a><span class="lineno"> 337</span>&#160; pool5_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00338"></a><span class="lineno"> 338</span>&#160; fc6_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00339"></a><span class="lineno"> 339</span>&#160; act6_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00340"></a><span class="lineno"> 340</span>&#160; fc7_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00341"></a><span class="lineno"> 341</span>&#160; act7_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00342"></a><span class="lineno"> 342</span>&#160; fc8_out.allocator()-&gt;allocate();</div>
<div class="line"><a name="l00343"></a><span class="lineno"> 343</span>&#160; }</div>
</div><!-- fragment -->
</div>
</div>
<a class="anchor" id="a7740c7ab195c03ac140f1f75f633470f"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">void build </td>
<td>(</td>
<td class="paramname"></td><td>)</td>
<td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Definition at line <a class="el" href="_alex_net_network_8h_source.xhtml#l00188">188</a> of file <a class="el" href="_alex_net_network_8h_source.xhtml">AlexNetNetwork.h</a>.</p>
<div class="fragment"><div class="line"><a name="l00189"></a><span class="lineno"> 189</span>&#160; {</div>
<div class="line"><a name="l00190"></a><span class="lineno"> 190</span>&#160; input.allocator()-&gt;init(TensorInfo(TensorShape(227U, 227U, 3U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00191"></a><span class="lineno"> 191</span>&#160; output.allocator()-&gt;init(TensorInfo(TensorShape(1000U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00192"></a><span class="lineno"> 192</span>&#160;</div>
<div class="line"><a name="l00193"></a><span class="lineno"> 193</span>&#160; <span class="comment">// Initialize intermediate tensors</span></div>
<div class="line"><a name="l00194"></a><span class="lineno"> 194</span>&#160; <span class="comment">// Layer 1</span></div>
<div class="line"><a name="l00195"></a><span class="lineno"> 195</span>&#160; conv1_out.allocator()-&gt;init(TensorInfo(TensorShape(55U, 55U, 96U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00196"></a><span class="lineno"> 196</span>&#160; act1_out.allocator()-&gt;init(TensorInfo(TensorShape(55U, 55U, 96U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00197"></a><span class="lineno"> 197</span>&#160; norm1_out.allocator()-&gt;init(TensorInfo(TensorShape(55U, 55U, 96U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00198"></a><span class="lineno"> 198</span>&#160; pool1_out.allocator()-&gt;init(TensorInfo(TensorShape(27U, 27U, 96U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00199"></a><span class="lineno"> 199</span>&#160; pool11_out = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;pool1_out, TensorShape(27U, 27U, 48U, _batches), Coordinates()));</div>
<div class="line"><a name="l00200"></a><span class="lineno"> 200</span>&#160; pool12_out = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;pool1_out, TensorShape(27U, 27U, 48U, _batches), Coordinates(0, 0, 48)));</div>
<div class="line"><a name="l00201"></a><span class="lineno"> 201</span>&#160; <span class="comment">// Layer 2</span></div>
<div class="line"><a name="l00202"></a><span class="lineno"> 202</span>&#160; conv2_out.allocator()-&gt;init(TensorInfo(TensorShape(27U, 27U, 256U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00203"></a><span class="lineno"> 203</span>&#160; conv21_out = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;conv2_out, TensorShape(27U, 27U, 128U, _batches), Coordinates()));</div>
<div class="line"><a name="l00204"></a><span class="lineno"> 204</span>&#160; conv22_out = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;conv2_out, TensorShape(27U, 27U, 128U, _batches), Coordinates(0, 0, 128)));</div>
<div class="line"><a name="l00205"></a><span class="lineno"> 205</span>&#160; act2_out.allocator()-&gt;init(TensorInfo(TensorShape(27U, 27U, 256U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00206"></a><span class="lineno"> 206</span>&#160; norm2_out.allocator()-&gt;init(TensorInfo(TensorShape(27U, 27U, 256U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00207"></a><span class="lineno"> 207</span>&#160; pool2_out.allocator()-&gt;init(TensorInfo(TensorShape(13U, 13U, 256U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00208"></a><span class="lineno"> 208</span>&#160; <span class="comment">// Layer 3</span></div>
<div class="line"><a name="l00209"></a><span class="lineno"> 209</span>&#160; conv3_out.allocator()-&gt;init(TensorInfo(TensorShape(13U, 13U, 384U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00210"></a><span class="lineno"> 210</span>&#160; act3_out.allocator()-&gt;init(TensorInfo(TensorShape(13U, 13U, 384U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00211"></a><span class="lineno"> 211</span>&#160; act31_out = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;act3_out, TensorShape(13U, 13U, 192U, _batches), Coordinates()));</div>
<div class="line"><a name="l00212"></a><span class="lineno"> 212</span>&#160; act32_out = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;act3_out, TensorShape(13U, 13U, 192U, _batches), Coordinates(0, 0, 192)));</div>
<div class="line"><a name="l00213"></a><span class="lineno"> 213</span>&#160; <span class="comment">// Layer 4</span></div>
<div class="line"><a name="l00214"></a><span class="lineno"> 214</span>&#160; conv4_out.allocator()-&gt;init(TensorInfo(TensorShape(13U, 13U, 384U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00215"></a><span class="lineno"> 215</span>&#160; conv41_out = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;conv4_out, TensorShape(13U, 13U, 192U, _batches), Coordinates()));</div>
<div class="line"><a name="l00216"></a><span class="lineno"> 216</span>&#160; conv42_out = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;conv4_out, TensorShape(13U, 13U, 192U, _batches), Coordinates(0, 0, 192)));</div>
<div class="line"><a name="l00217"></a><span class="lineno"> 217</span>&#160; act4_out.allocator()-&gt;init(TensorInfo(TensorShape(13U, 13U, 384U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00218"></a><span class="lineno"> 218</span>&#160; act41_out = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;act4_out, TensorShape(13U, 13U, 192U, _batches), Coordinates()));</div>
<div class="line"><a name="l00219"></a><span class="lineno"> 219</span>&#160; act42_out = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;act4_out, TensorShape(13U, 13U, 192U, _batches), Coordinates(0, 0, 192)));</div>
<div class="line"><a name="l00220"></a><span class="lineno"> 220</span>&#160; <span class="comment">// Layer 5</span></div>
<div class="line"><a name="l00221"></a><span class="lineno"> 221</span>&#160; conv5_out.allocator()-&gt;init(TensorInfo(TensorShape(13U, 13U, 256U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00222"></a><span class="lineno"> 222</span>&#160; conv51_out = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;conv5_out, TensorShape(13U, 13U, 128U, _batches), Coordinates()));</div>
<div class="line"><a name="l00223"></a><span class="lineno"> 223</span>&#160; conv52_out = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;conv5_out, TensorShape(13U, 13U, 128U, _batches), Coordinates(0, 0, 128)));</div>
<div class="line"><a name="l00224"></a><span class="lineno"> 224</span>&#160; act5_out.allocator()-&gt;init(TensorInfo(TensorShape(13U, 13U, 256U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00225"></a><span class="lineno"> 225</span>&#160; pool5_out.allocator()-&gt;init(TensorInfo(TensorShape(6U, 6U, 256U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00226"></a><span class="lineno"> 226</span>&#160; <span class="comment">// Layer 6</span></div>
<div class="line"><a name="l00227"></a><span class="lineno"> 227</span>&#160; fc6_out.allocator()-&gt;init(TensorInfo(TensorShape(4096U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00228"></a><span class="lineno"> 228</span>&#160; act6_out.allocator()-&gt;init(TensorInfo(TensorShape(4096U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00229"></a><span class="lineno"> 229</span>&#160; <span class="comment">// Layer 7</span></div>
<div class="line"><a name="l00230"></a><span class="lineno"> 230</span>&#160; fc7_out.allocator()-&gt;init(TensorInfo(TensorShape(4096U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00231"></a><span class="lineno"> 231</span>&#160; act7_out.allocator()-&gt;init(TensorInfo(TensorShape(4096U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00232"></a><span class="lineno"> 232</span>&#160; <span class="comment">// Layer 8</span></div>
<div class="line"><a name="l00233"></a><span class="lineno"> 233</span>&#160; fc8_out.allocator()-&gt;init(TensorInfo(TensorShape(1000U, _batches), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00234"></a><span class="lineno"> 234</span>&#160;</div>
<div class="line"><a name="l00235"></a><span class="lineno"> 235</span>&#160; <span class="comment">// Configure Layers</span></div>
<div class="line"><a name="l00236"></a><span class="lineno"> 236</span>&#160; <span class="comment">// Layer 1</span></div>
<div class="line"><a name="l00237"></a><span class="lineno"> 237</span>&#160; TensorType *b0 = _reshaped_weights ? <span class="keyword">nullptr</span> : &amp;b[0];</div>
<div class="line"><a name="l00238"></a><span class="lineno"> 238</span>&#160; conv1.configure(&amp;input, &amp;w[0], b0, &amp;conv1_out, PadStrideInfo(4, 4, 0, 0), WeightsInfo(_reshaped_weights, 11U, 11U, 96U));</div>
<div class="line"><a name="l00239"></a><span class="lineno"> 239</span>&#160; act1.configure(&amp;conv1_out, &amp;act1_out, ActivationLayerInfo(<a class="code" href="classarm__compute_1_1_activation_layer_info.xhtml#a56297e0f7b215eea46c818cb7528d9eaad346bb4679d29be241279f15d7795c1c">ActivationLayerInfo::ActivationFunction::RELU</a>));</div>
<div class="line"><a name="l00240"></a><span class="lineno"> 240</span>&#160; norm1.configure(&amp;act1_out, &amp;norm1_out, NormalizationLayerInfo(<a class="code" href="namespacearm__compute.xhtml#ad4bb8dabdbf8ad75e34220cc666b59caa980fef040549733973683b1a868f96e5">NormType::CROSS_MAP</a>, 5, 0.0001f, 0.75f));</div>
<div class="line"><a name="l00241"></a><span class="lineno"> 241</span>&#160; pool1.configure(&amp;norm1_out, &amp;pool1_out, PoolingLayerInfo(<a class="code" href="namespacearm__compute.xhtml#adf2ced65e536375a1c96425d9fced858a26a4b44a837bf97b972628509912b4a5">PoolingType::MAX</a>, 3, PadStrideInfo(2, 2, 0, 0)));</div>
<div class="line"><a name="l00242"></a><span class="lineno"> 242</span>&#160; <span class="comment">// Layer 2</span></div>
<div class="line"><a name="l00243"></a><span class="lineno"> 243</span>&#160; conv21.configure(pool11_out.get(), w11.get(), b11.get(), conv21_out.get(), PadStrideInfo(1, 1, 2, 2), WeightsInfo(_reshaped_weights, 5U, 5U, 128U));</div>
<div class="line"><a name="l00244"></a><span class="lineno"> 244</span>&#160; conv22.configure(pool12_out.get(), w12.get(), b12.get(), conv22_out.get(), PadStrideInfo(1, 1, 2, 2), WeightsInfo(_reshaped_weights, 5U, 5U, 128U));</div>
<div class="line"><a name="l00245"></a><span class="lineno"> 245</span>&#160; act2.configure(&amp;conv2_out, &amp;act2_out, ActivationLayerInfo(<a class="code" href="classarm__compute_1_1_activation_layer_info.xhtml#a56297e0f7b215eea46c818cb7528d9eaad346bb4679d29be241279f15d7795c1c">ActivationLayerInfo::ActivationFunction::RELU</a>));</div>
<div class="line"><a name="l00246"></a><span class="lineno"> 246</span>&#160; norm2.configure(&amp;act2_out, &amp;norm2_out, NormalizationLayerInfo(<a class="code" href="namespacearm__compute.xhtml#ad4bb8dabdbf8ad75e34220cc666b59caa980fef040549733973683b1a868f96e5">NormType::CROSS_MAP</a>, 5, 0.0001f, 0.75f));</div>
<div class="line"><a name="l00247"></a><span class="lineno"> 247</span>&#160; pool2.configure(&amp;norm2_out, &amp;pool2_out, PoolingLayerInfo(<a class="code" href="namespacearm__compute.xhtml#adf2ced65e536375a1c96425d9fced858a26a4b44a837bf97b972628509912b4a5">PoolingType::MAX</a>, 3, PadStrideInfo(2, 2, 0, 0)));</div>
<div class="line"><a name="l00248"></a><span class="lineno"> 248</span>&#160; <span class="comment">// Layer 3</span></div>
<div class="line"><a name="l00249"></a><span class="lineno"> 249</span>&#160; TensorType *b2 = (_reshaped_weights &amp;&amp; !_is_direct_conv) ? <span class="keyword">nullptr</span> : &amp;b[2];</div>
<div class="line"><a name="l00250"></a><span class="lineno"> 250</span>&#160; conv3.configure(&amp;pool2_out, &amp;w[2], b2, &amp;conv3_out, PadStrideInfo(1, 1, 1, 1), WeightsInfo(_reshaped_weights, 3U, 3U, 384U));</div>
<div class="line"><a name="l00251"></a><span class="lineno"> 251</span>&#160; act3.configure(&amp;conv3_out, &amp;act3_out, ActivationLayerInfo(<a class="code" href="classarm__compute_1_1_activation_layer_info.xhtml#a56297e0f7b215eea46c818cb7528d9eaad346bb4679d29be241279f15d7795c1c">ActivationLayerInfo::ActivationFunction::RELU</a>));</div>
<div class="line"><a name="l00252"></a><span class="lineno"> 252</span>&#160; <span class="comment">// Layer 4</span></div>
<div class="line"><a name="l00253"></a><span class="lineno"> 253</span>&#160; conv41.configure(act31_out.get(), w31.get(), b31.get(), conv41_out.get(), PadStrideInfo(1, 1, 1, 1), WeightsInfo(_reshaped_weights, 3U, 3U, 192U));</div>
<div class="line"><a name="l00254"></a><span class="lineno"> 254</span>&#160; conv42.configure(act32_out.get(), w32.get(), b32.get(), conv42_out.get(), PadStrideInfo(1, 1, 1, 1), WeightsInfo(_reshaped_weights, 3U, 3U, 192U));</div>
<div class="line"><a name="l00255"></a><span class="lineno"> 255</span>&#160; act4.configure(&amp;conv4_out, &amp;act4_out, ActivationLayerInfo(<a class="code" href="classarm__compute_1_1_activation_layer_info.xhtml#a56297e0f7b215eea46c818cb7528d9eaad346bb4679d29be241279f15d7795c1c">ActivationLayerInfo::ActivationFunction::RELU</a>));</div>
<div class="line"><a name="l00256"></a><span class="lineno"> 256</span>&#160; <span class="comment">// Layer 5</span></div>
<div class="line"><a name="l00257"></a><span class="lineno"> 257</span>&#160; conv51.configure(act41_out.get(), w41.get(), b41.get(), conv51_out.get(), PadStrideInfo(1, 1, 1, 1), WeightsInfo(_reshaped_weights, 3U, 3U, 128U));</div>
<div class="line"><a name="l00258"></a><span class="lineno"> 258</span>&#160; conv52.configure(act42_out.get(), w42.get(), b42.get(), conv52_out.get(), PadStrideInfo(1, 1, 1, 1), WeightsInfo(_reshaped_weights, 3U, 3U, 128U));</div>
<div class="line"><a name="l00259"></a><span class="lineno"> 259</span>&#160; act5.configure(&amp;conv5_out, &amp;act5_out, ActivationLayerInfo(<a class="code" href="classarm__compute_1_1_activation_layer_info.xhtml#a56297e0f7b215eea46c818cb7528d9eaad346bb4679d29be241279f15d7795c1c">ActivationLayerInfo::ActivationFunction::RELU</a>));</div>
<div class="line"><a name="l00260"></a><span class="lineno"> 260</span>&#160; pool5.configure(&amp;act5_out, &amp;pool5_out, PoolingLayerInfo(<a class="code" href="namespacearm__compute.xhtml#adf2ced65e536375a1c96425d9fced858a26a4b44a837bf97b972628509912b4a5">PoolingType::MAX</a>, 3, PadStrideInfo(2, 2, 0, 0)));</div>
<div class="line"><a name="l00261"></a><span class="lineno"> 261</span>&#160; <span class="comment">// Layer 6</span></div>
<div class="line"><a name="l00262"></a><span class="lineno"> 262</span>&#160; fc6.configure(&amp;pool5_out, &amp;w[5], &amp;b[5], &amp;fc6_out, <span class="keyword">true</span>, _reshaped_weights);</div>
<div class="line"><a name="l00263"></a><span class="lineno"> 263</span>&#160; act6.configure(&amp;fc6_out, &amp;act6_out, ActivationLayerInfo(<a class="code" href="classarm__compute_1_1_activation_layer_info.xhtml#a56297e0f7b215eea46c818cb7528d9eaad346bb4679d29be241279f15d7795c1c">ActivationLayerInfo::ActivationFunction::RELU</a>));</div>
<div class="line"><a name="l00264"></a><span class="lineno"> 264</span>&#160; <span class="comment">// Layer 7</span></div>
<div class="line"><a name="l00265"></a><span class="lineno"> 265</span>&#160; fc7.configure(&amp;act6_out, &amp;w[6], &amp;b[6], &amp;fc7_out, <span class="keyword">true</span>, _reshaped_weights);</div>
<div class="line"><a name="l00266"></a><span class="lineno"> 266</span>&#160; act7.configure(&amp;fc7_out, &amp;act7_out, ActivationLayerInfo(<a class="code" href="classarm__compute_1_1_activation_layer_info.xhtml#a56297e0f7b215eea46c818cb7528d9eaad346bb4679d29be241279f15d7795c1c">ActivationLayerInfo::ActivationFunction::RELU</a>));</div>
<div class="line"><a name="l00267"></a><span class="lineno"> 267</span>&#160; <span class="comment">// Layer 8</span></div>
<div class="line"><a name="l00268"></a><span class="lineno"> 268</span>&#160; fc8.configure(&amp;act7_out, &amp;w[7], &amp;b[7], &amp;fc8_out, <span class="keyword">true</span>, _reshaped_weights);</div>
<div class="line"><a name="l00269"></a><span class="lineno"> 269</span>&#160; <span class="comment">// Softmax</span></div>
<div class="line"><a name="l00270"></a><span class="lineno"> 270</span>&#160; smx.configure(&amp;fc8_out, &amp;output);</div>
<div class="line"><a name="l00271"></a><span class="lineno"> 271</span>&#160; }</div>
<div class="ttc" id="classarm__compute_1_1_activation_layer_info_xhtml_a56297e0f7b215eea46c818cb7528d9eaad346bb4679d29be241279f15d7795c1c"><div class="ttname"><a href="classarm__compute_1_1_activation_layer_info.xhtml#a56297e0f7b215eea46c818cb7528d9eaad346bb4679d29be241279f15d7795c1c">arm_compute::ActivationLayerInfo::ActivationFunction::RELU</a></div><div class="ttdoc">Rectifier ( ) </div></div>
<div class="ttc" id="namespacearm__compute_xhtml_adf2ced65e536375a1c96425d9fced858a26a4b44a837bf97b972628509912b4a5"><div class="ttname"><a href="namespacearm__compute.xhtml#adf2ced65e536375a1c96425d9fced858a26a4b44a837bf97b972628509912b4a5">arm_compute::NonLinearFilterFunction::MAX</a></div><div class="ttdoc">Non linear dilate. </div></div>
<div class="ttc" id="namespacearm__compute_xhtml_ad4bb8dabdbf8ad75e34220cc666b59caa980fef040549733973683b1a868f96e5"><div class="ttname"><a href="namespacearm__compute.xhtml#ad4bb8dabdbf8ad75e34220cc666b59caa980fef040549733973683b1a868f96e5">arm_compute::NormType::CROSS_MAP</a></div><div class="ttdoc">Normalization applied cross maps. </div></div>
</div><!-- fragment -->
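<p>The configuration above reproduces AlexNet's grouped convolutions (layers 2, 4 and 5) by running two half-width convolution functions over channel-wise sub-tensor views of a single parent tensor, rather than using a dedicated grouping parameter. Below is a minimal sketch of that pattern in isolation, assuming the NEON runtime types (<code>Tensor</code>, <code>SubTensor</code>, <code>NEConvolutionLayer</code>) as the template arguments; the shapes mirror layer 2 of the listing, with the batch dimension omitted.</p>
<pre class="fragment">#include "arm_compute/core/TensorInfo.h"
#include "arm_compute/core/Types.h"
#include "arm_compute/runtime/NEON/functions/NEConvolutionLayer.h"
#include "arm_compute/runtime/SubTensor.h"
#include "arm_compute/runtime/Tensor.h"

using namespace arm_compute;

// Parent tensors of the grouped layer and per-group weights/biases.
Tensor pool1_out{}, conv2_out{}, w_lo{}, w_hi{}, b_lo{}, b_hi{};
pool1_out.allocator()-&gt;init(TensorInfo(TensorShape(27U, 27U, 96U), 1, DataType::F32));
conv2_out.allocator()-&gt;init(TensorInfo(TensorShape(27U, 27U, 256U), 1, DataType::F32));
w_lo.allocator()-&gt;init(TensorInfo(TensorShape(5U, 5U, 48U, 128U), 1, DataType::F32));
w_hi.allocator()-&gt;init(TensorInfo(TensorShape(5U, 5U, 48U, 128U), 1, DataType::F32));
b_lo.allocator()-&gt;init(TensorInfo(TensorShape(128U), 1, DataType::F32));
b_hi.allocator()-&gt;init(TensorInfo(TensorShape(128U), 1, DataType::F32));

// Channel-wise views: the lower 48 input channels feed the lower 128 output channels, and so on.
SubTensor in_lo(&amp;pool1_out, TensorShape(27U, 27U, 48U), Coordinates());
SubTensor in_hi(&amp;pool1_out, TensorShape(27U, 27U, 48U), Coordinates(0, 0, 48));
SubTensor out_lo(&amp;conv2_out, TensorShape(27U, 27U, 128U), Coordinates());
SubTensor out_hi(&amp;conv2_out, TensorShape(27U, 27U, 128U), Coordinates(0, 0, 128));

// One convolution function per group; allocation and data filling would follow before running.
NEConvolutionLayer conv2a{}, conv2b{};
conv2a.configure(&amp;in_lo, &amp;w_lo, &amp;b_lo, &amp;out_lo, PadStrideInfo(1, 1, 2, 2));
conv2b.configure(&amp;in_hi, &amp;w_hi, &amp;b_hi, &amp;out_hi, PadStrideInfo(1, 1, 2, 2));
</pre>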
</div>
</div>
<a class="anchor" id="ac8bb3912a3ce86b15842e79d0b421204"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">void clear </td>
<td>(</td>
<td class="paramname"></td><td>)</td>
<td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Clears all allocated memory from the tensor objects. </p>
<p>Definition at line <a class="el" href="_alex_net_network_8h_source.xhtml#l00454">454</a> of file <a class="el" href="_alex_net_network_8h_source.xhtml">AlexNetNetwork.h</a>.</p>
<div class="fragment"><div class="line"><a name="l00455"></a><span class="lineno"> 455</span>&#160; {</div>
<div class="line"><a name="l00456"></a><span class="lineno"> 456</span>&#160; <span class="comment">// Free allocations</span></div>
<div class="line"><a name="l00457"></a><span class="lineno"> 457</span>&#160; input.allocator()-&gt;free();</div>
<div class="line"><a name="l00458"></a><span class="lineno"> 458</span>&#160; output.allocator()-&gt;free();</div>
<div class="line"><a name="l00459"></a><span class="lineno"> 459</span>&#160;</div>
<div class="line"><a name="l00460"></a><span class="lineno"> 460</span>&#160; <span class="keywordflow">if</span>(!_reshaped_weights)</div>
<div class="line"><a name="l00461"></a><span class="lineno"> 461</span>&#160; {</div>
<div class="line"><a name="l00462"></a><span class="lineno"> 462</span>&#160; <span class="keywordflow">for</span>(<span class="keyword">auto</span> &amp;wi : w)</div>
<div class="line"><a name="l00463"></a><span class="lineno"> 463</span>&#160; {</div>
<div class="line"><a name="l00464"></a><span class="lineno"> 464</span>&#160; wi.allocator()-&gt;free();</div>
<div class="line"><a name="l00465"></a><span class="lineno"> 465</span>&#160; }</div>
<div class="line"><a name="l00466"></a><span class="lineno"> 466</span>&#160;</div>
<div class="line"><a name="l00467"></a><span class="lineno"> 467</span>&#160; <span class="keywordflow">for</span>(<span class="keyword">auto</span> &amp;bi : b)</div>
<div class="line"><a name="l00468"></a><span class="lineno"> 468</span>&#160; {</div>
<div class="line"><a name="l00469"></a><span class="lineno"> 469</span>&#160; bi.allocator()-&gt;free();</div>
<div class="line"><a name="l00470"></a><span class="lineno"> 470</span>&#160; }</div>
<div class="line"><a name="l00471"></a><span class="lineno"> 471</span>&#160; }</div>
<div class="line"><a name="l00472"></a><span class="lineno"> 472</span>&#160; <span class="keywordflow">else</span></div>
<div class="line"><a name="l00473"></a><span class="lineno"> 473</span>&#160; {</div>
<div class="line"><a name="l00474"></a><span class="lineno"> 474</span>&#160; w[0].allocator()-&gt;free();</div>
<div class="line"><a name="l00475"></a><span class="lineno"> 475</span>&#160; w[2].allocator()-&gt;free();</div>
<div class="line"><a name="l00476"></a><span class="lineno"> 476</span>&#160; w[5].allocator()-&gt;free();</div>
<div class="line"><a name="l00477"></a><span class="lineno"> 477</span>&#160; w[6].allocator()-&gt;free();</div>
<div class="line"><a name="l00478"></a><span class="lineno"> 478</span>&#160; w[7].allocator()-&gt;free();</div>
<div class="line"><a name="l00479"></a><span class="lineno"> 479</span>&#160;</div>
<div class="line"><a name="l00480"></a><span class="lineno"> 480</span>&#160; b[5].allocator()-&gt;free();</div>
<div class="line"><a name="l00481"></a><span class="lineno"> 481</span>&#160; b[6].allocator()-&gt;free();</div>
<div class="line"><a name="l00482"></a><span class="lineno"> 482</span>&#160; b[7].allocator()-&gt;free();</div>
<div class="line"><a name="l00483"></a><span class="lineno"> 483</span>&#160;</div>
<div class="line"><a name="l00484"></a><span class="lineno"> 484</span>&#160; <span class="keywordflow">if</span>(_is_direct_conv)</div>
<div class="line"><a name="l00485"></a><span class="lineno"> 485</span>&#160; {</div>
<div class="line"><a name="l00486"></a><span class="lineno"> 486</span>&#160; w[3].allocator()-&gt;free();</div>
<div class="line"><a name="l00487"></a><span class="lineno"> 487</span>&#160; w[4].allocator()-&gt;free();</div>
<div class="line"><a name="l00488"></a><span class="lineno"> 488</span>&#160; b[2].allocator()-&gt;free();</div>
<div class="line"><a name="l00489"></a><span class="lineno"> 489</span>&#160; b[3].allocator()-&gt;free();</div>
<div class="line"><a name="l00490"></a><span class="lineno"> 490</span>&#160; b[4].allocator()-&gt;free();</div>
<div class="line"><a name="l00491"></a><span class="lineno"> 491</span>&#160; }</div>
<div class="line"><a name="l00492"></a><span class="lineno"> 492</span>&#160; }</div>
<div class="line"><a name="l00493"></a><span class="lineno"> 493</span>&#160;</div>
<div class="line"><a name="l00494"></a><span class="lineno"> 494</span>&#160; w11.reset();</div>
<div class="line"><a name="l00495"></a><span class="lineno"> 495</span>&#160; w12.reset();</div>
<div class="line"><a name="l00496"></a><span class="lineno"> 496</span>&#160; b11.reset();</div>
<div class="line"><a name="l00497"></a><span class="lineno"> 497</span>&#160; b11.reset();</div>
<div class="line"><a name="l00498"></a><span class="lineno"> 498</span>&#160; w31.reset();</div>
<div class="line"><a name="l00499"></a><span class="lineno"> 499</span>&#160; w32.reset();</div>
<div class="line"><a name="l00500"></a><span class="lineno"> 500</span>&#160; b31.reset();</div>
<div class="line"><a name="l00501"></a><span class="lineno"> 501</span>&#160; b32.reset();</div>
<div class="line"><a name="l00502"></a><span class="lineno"> 502</span>&#160; w41.reset();</div>
<div class="line"><a name="l00503"></a><span class="lineno"> 503</span>&#160; w42.reset();</div>
<div class="line"><a name="l00504"></a><span class="lineno"> 504</span>&#160; b41.reset();</div>
<div class="line"><a name="l00505"></a><span class="lineno"> 505</span>&#160; b42.reset();</div>
<div class="line"><a name="l00506"></a><span class="lineno"> 506</span>&#160;</div>
<div class="line"><a name="l00507"></a><span class="lineno"> 507</span>&#160; conv1_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00508"></a><span class="lineno"> 508</span>&#160; act1_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00509"></a><span class="lineno"> 509</span>&#160; norm1_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00510"></a><span class="lineno"> 510</span>&#160; pool1_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00511"></a><span class="lineno"> 511</span>&#160; conv2_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00512"></a><span class="lineno"> 512</span>&#160; act2_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00513"></a><span class="lineno"> 513</span>&#160; norm2_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00514"></a><span class="lineno"> 514</span>&#160; pool2_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00515"></a><span class="lineno"> 515</span>&#160; conv3_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00516"></a><span class="lineno"> 516</span>&#160; act3_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00517"></a><span class="lineno"> 517</span>&#160; conv4_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00518"></a><span class="lineno"> 518</span>&#160; act4_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00519"></a><span class="lineno"> 519</span>&#160; conv5_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00520"></a><span class="lineno"> 520</span>&#160; act5_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00521"></a><span class="lineno"> 521</span>&#160; pool5_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00522"></a><span class="lineno"> 522</span>&#160; fc6_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00523"></a><span class="lineno"> 523</span>&#160; act6_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00524"></a><span class="lineno"> 524</span>&#160; fc7_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00525"></a><span class="lineno"> 525</span>&#160; act7_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00526"></a><span class="lineno"> 526</span>&#160; fc8_out.allocator()-&gt;free();</div>
<div class="line"><a name="l00527"></a><span class="lineno"> 527</span>&#160; }</div>
</div><!-- fragment -->
</div>
</div>
<a class="anchor" id="a3a41262ce9aed70a248ecefae646013b"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">void feed </td>
<td>(</td>
<td class="paramtype">std::string&#160;</td>
<td class="paramname"><em>name</em></td><td>)</td>
<td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Feeds the network input from a file. </p>
<dl class="params"><dt>Parameters</dt><dd>
<table class="params">
<tr><td class="paramname">name</td><td>File name of containing the input data. </td></tr>
</table>
</dd>
</dl>
<p>Definition at line <a class="el" href="_alex_net_network_8h_source.xhtml#l00413">413</a> of file <a class="el" href="_alex_net_network_8h_source.xhtml">AlexNetNetwork.h</a>.</p>
<div class="fragment"><div class="line"><a name="l00414"></a><span class="lineno"> 414</span>&#160; {</div>
<div class="line"><a name="l00415"></a><span class="lineno"> 415</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_layer_data(Accessor(input), name);</div>
<div class="line"><a name="l00416"></a><span class="lineno"> 416</span>&#160; }</div>
<div class="ttc" id="namespacearm__compute_1_1test_xhtml_a71326f0909d77386e29b511e1990a11f"><div class="ttname"><a href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">arm_compute::test::library</a></div><div class="ttdeci">std::unique_ptr&lt; AssetsLibrary &gt; library</div><div class="ttdef"><b>Definition:</b> <a href="main_8cpp_source.xhtml#l00055">main.cpp:55</a></div></div>
</div><!-- fragment -->
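<p>A minimal call sketch, where <code>network</code> is an instantiated AlexNetNetwork object (as in the sketch in the class description). The file name below is a placeholder, and the data layout must match what the framework's <code>AssetsLibrary::fill_layer_data()</code> (used above) expects.</p>
<pre class="fragment">// "alexnet_input.dat" is a hypothetical asset name used only for illustration.
network.feed("alexnet_input.dat");
</pre>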
</div>
</div>
<a class="anchor" id="aab0a3920e581535eeb32febaf20dca50"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">void fill </td>
<td>(</td>
<td class="paramtype">std::vector&lt; std::string &gt;&#160;</td>
<td class="paramname"><em>weights</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">std::vector&lt; std::string &gt;&#160;</td>
<td class="paramname"><em>biases</em>&#160;</td>
</tr>
<tr>
<td></td>
<td>)</td>
<td></td><td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Fills the trainable parameters from binary files. </p>
<dl class="params"><dt>Parameters</dt><dd>
<table class="params">
<tr><td class="paramname">weights</td><td>Files names containing the weights data </td></tr>
<tr><td class="paramname">biases</td><td>Files names containing the bias data </td></tr>
</table>
</dd>
</dl>
<p>Definition at line <a class="el" href="_alex_net_network_8h_source.xhtml#l00396">396</a> of file <a class="el" href="_alex_net_network_8h_source.xhtml">AlexNetNetwork.h</a>.</p>
<div class="fragment"><div class="line"><a name="l00397"></a><span class="lineno"> 397</span>&#160; {</div>
<div class="line"><a name="l00398"></a><span class="lineno"> 398</span>&#160; <a class="code" href="_error_8h.xhtml#a54a6080c9f4df1f908e57a9bbb46f5da">ARM_COMPUTE_ERROR_ON</a>(weights.size() != w.size());</div>
<div class="line"><a name="l00399"></a><span class="lineno"> 399</span>&#160; <a class="code" href="_error_8h.xhtml#a54a6080c9f4df1f908e57a9bbb46f5da">ARM_COMPUTE_ERROR_ON</a>(biases.size() != b.size());</div>
<div class="line"><a name="l00400"></a><span class="lineno"> 400</span>&#160; <a class="code" href="_error_8h.xhtml#a54a6080c9f4df1f908e57a9bbb46f5da">ARM_COMPUTE_ERROR_ON</a>(_reshaped_weights);</div>
<div class="line"><a name="l00401"></a><span class="lineno"> 401</span>&#160;</div>
<div class="line"><a name="l00402"></a><span class="lineno"> 402</span>&#160; <span class="keywordflow">for</span>(<span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> i = 0; i &lt; weights.size(); ++i)</div>
<div class="line"><a name="l00403"></a><span class="lineno"> 403</span>&#160; {</div>
<div class="line"><a name="l00404"></a><span class="lineno"> 404</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_layer_data(Accessor(w[i]), weights[i]);</div>
<div class="line"><a name="l00405"></a><span class="lineno"> 405</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_layer_data(Accessor(b[i]), biases[i]);</div>
<div class="line"><a name="l00406"></a><span class="lineno"> 406</span>&#160; }</div>
<div class="line"><a name="l00407"></a><span class="lineno"> 407</span>&#160; }</div>
<div class="ttc" id="_error_8h_xhtml_a54a6080c9f4df1f908e57a9bbb46f5da"><div class="ttname"><a href="_error_8h.xhtml#a54a6080c9f4df1f908e57a9bbb46f5da">ARM_COMPUTE_ERROR_ON</a></div><div class="ttdeci">#define ARM_COMPUTE_ERROR_ON(cond)</div><div class="ttdoc">If the condition is true then an error message is printed and an exception thrown. </div><div class="ttdef"><b>Definition:</b> <a href="_error_8h_source.xhtml#l00124">Error.h:124</a></div></div>
<div class="ttc" id="namespacearm__compute_1_1test_xhtml_a71326f0909d77386e29b511e1990a11f"><div class="ttname"><a href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">arm_compute::test::library</a></div><div class="ttdeci">std::unique_ptr&lt; AssetsLibrary &gt; library</div><div class="ttdef"><b>Definition:</b> <a href="main_8cpp_source.xhtml#l00055">main.cpp:55</a></div></div>
</div><!-- fragment -->
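<p>A usage sketch, where <code>network</code> is an instantiated AlexNetNetwork object: one weights file and one bias file per trainable layer, in layer order. The listings on this page index <code>w[0]</code>..<code>w[7]</code> and <code>b[0]</code>..<code>b[7]</code>, so eight names are expected in each vector, and the assertions above require the network to have been initialised with <code>reshaped_weights=false</code>. The file names below are placeholders.</p>
<pre class="fragment">// Placeholder asset names for illustration only.
std::vector&lt;std::string&gt; weights = { "conv1_w.dat", "conv2_w.dat", "conv3_w.dat", "conv4_w.dat",
                                     "conv5_w.dat", "fc6_w.dat", "fc7_w.dat", "fc8_w.dat" };
std::vector&lt;std::string&gt; biases  = { "conv1_b.dat", "conv2_b.dat", "conv3_b.dat", "conv4_b.dat",
                                     "conv5_b.dat", "fc6_b.dat", "fc7_b.dat", "fc8_b.dat" };
network.fill(weights, biases); // asserts weights.size() == w.size() and biases.size() == b.size()
</pre>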
</div>
</div>
<a class="anchor" id="a3b778cda9ac3fad08e7217edbcb942e0"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">void fill_random </td>
<td>(</td>
<td class="paramname"></td><td>)</td>
<td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Fills the trainable parameters and input with random data. </p>
<p>Definition at line <a class="el" href="_alex_net_network_8h_source.xhtml#l00346">346</a> of file <a class="el" href="_alex_net_network_8h_source.xhtml">AlexNetNetwork.h</a>.</p>
<div class="fragment"><div class="line"><a name="l00347"></a><span class="lineno"> 347</span>&#160; {</div>
<div class="line"><a name="l00348"></a><span class="lineno"> 348</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(input), 0);</div>
<div class="line"><a name="l00349"></a><span class="lineno"> 349</span>&#160;</div>
<div class="line"><a name="l00350"></a><span class="lineno"> 350</span>&#160; <span class="keywordflow">if</span>(!_reshaped_weights)</div>
<div class="line"><a name="l00351"></a><span class="lineno"> 351</span>&#160; {</div>
<div class="line"><a name="l00352"></a><span class="lineno"> 352</span>&#160; <span class="keywordflow">for</span>(<span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> i = 0; i &lt; w.size(); ++i)</div>
<div class="line"><a name="l00353"></a><span class="lineno"> 353</span>&#160; {</div>
<div class="line"><a name="l00354"></a><span class="lineno"> 354</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(w[i]), i + 1);</div>
<div class="line"><a name="l00355"></a><span class="lineno"> 355</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(b[i]), i + 10);</div>
<div class="line"><a name="l00356"></a><span class="lineno"> 356</span>&#160; }</div>
<div class="line"><a name="l00357"></a><span class="lineno"> 357</span>&#160; }</div>
<div class="line"><a name="l00358"></a><span class="lineno"> 358</span>&#160; <span class="keywordflow">else</span></div>
<div class="line"><a name="l00359"></a><span class="lineno"> 359</span>&#160; {</div>
<div class="line"><a name="l00360"></a><span class="lineno"> 360</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(w[0]), 1);</div>
<div class="line"><a name="l00361"></a><span class="lineno"> 361</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(w[2]), 2);</div>
<div class="line"><a name="l00362"></a><span class="lineno"> 362</span>&#160;</div>
<div class="line"><a name="l00363"></a><span class="lineno"> 363</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(w[5]), 3);</div>
<div class="line"><a name="l00364"></a><span class="lineno"> 364</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(b[5]), 4);</div>
<div class="line"><a name="l00365"></a><span class="lineno"> 365</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(w[6]), 5);</div>
<div class="line"><a name="l00366"></a><span class="lineno"> 366</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(b[6]), 6);</div>
<div class="line"><a name="l00367"></a><span class="lineno"> 367</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(w[7]), 7);</div>
<div class="line"><a name="l00368"></a><span class="lineno"> 368</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(b[7]), 8);</div>
<div class="line"><a name="l00369"></a><span class="lineno"> 369</span>&#160;</div>
<div class="line"><a name="l00370"></a><span class="lineno"> 370</span>&#160; <span class="keywordflow">if</span>(!_is_direct_conv)</div>
<div class="line"><a name="l00371"></a><span class="lineno"> 371</span>&#160; {</div>
<div class="line"><a name="l00372"></a><span class="lineno"> 372</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(*dynamic_cast&lt;TensorType *&gt;(w11.get())), 9);</div>
<div class="line"><a name="l00373"></a><span class="lineno"> 373</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(*dynamic_cast&lt;TensorType *&gt;(w12.get())), 10);</div>
<div class="line"><a name="l00374"></a><span class="lineno"> 374</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(*dynamic_cast&lt;TensorType *&gt;(w31.get())), 11);</div>
<div class="line"><a name="l00375"></a><span class="lineno"> 375</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(*dynamic_cast&lt;TensorType *&gt;(w32.get())), 12);</div>
<div class="line"><a name="l00376"></a><span class="lineno"> 376</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(*dynamic_cast&lt;TensorType *&gt;(w41.get())), 13);</div>
<div class="line"><a name="l00377"></a><span class="lineno"> 377</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(*dynamic_cast&lt;TensorType *&gt;(w42.get())), 14);</div>
<div class="line"><a name="l00378"></a><span class="lineno"> 378</span>&#160; }</div>
<div class="line"><a name="l00379"></a><span class="lineno"> 379</span>&#160; <span class="keywordflow">else</span></div>
<div class="line"><a name="l00380"></a><span class="lineno"> 380</span>&#160; {</div>
<div class="line"><a name="l00381"></a><span class="lineno"> 381</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(w[1]), 9);</div>
<div class="line"><a name="l00382"></a><span class="lineno"> 382</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(b[1]), 10);</div>
<div class="line"><a name="l00383"></a><span class="lineno"> 383</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(w[3]), 11);</div>
<div class="line"><a name="l00384"></a><span class="lineno"> 384</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(b[3]), 12);</div>
<div class="line"><a name="l00385"></a><span class="lineno"> 385</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(w[4]), 13);</div>
<div class="line"><a name="l00386"></a><span class="lineno"> 386</span>&#160; <a class="code" href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">library</a>-&gt;fill_tensor_uniform(Accessor(b[4]), 14);</div>
<div class="line"><a name="l00387"></a><span class="lineno"> 387</span>&#160; }</div>
<div class="line"><a name="l00388"></a><span class="lineno"> 388</span>&#160; }</div>
<div class="line"><a name="l00389"></a><span class="lineno"> 389</span>&#160; }</div>
<div class="ttc" id="namespacearm__compute_1_1test_xhtml_a71326f0909d77386e29b511e1990a11f"><div class="ttname"><a href="namespacearm__compute_1_1test.xhtml#a71326f0909d77386e29b511e1990a11f">arm_compute::test::library</a></div><div class="ttdeci">std::unique_ptr&lt; AssetsLibrary &gt; library</div><div class="ttdef"><b>Definition:</b> <a href="main_8cpp_source.xhtml#l00055">main.cpp:55</a></div></div>
</div><!-- fragment -->
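<p>A setup sketch added for illustration: <code>fill_random()</code> expects the tensors to exist, so the sequence below assumes an instance named <code>network</code> and the class' other setup helpers (<code>build()</code> and <code>allocate()</code>, as used by the library's benchmark fixtures).</p>
<pre class="fragment">// Sketch: deterministic random initialisation of an FP32, single-batch network.
network.init(DataType::F32, 0 /* fixed_point_position */, 1 /* batches */);
network.build();       // assumed setup helper: configures the layer functions
network.allocate();    // assumed setup helper: allocates input, output, weights and biases
network.fill_random(); // fills the input and every weight/bias tensor with fixed per-tensor seeds
</pre>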
</div>
</div>
<a class="anchor" id="a1466ef70729f3c8b5da5ebfec3f53f26"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">std::vector&lt;unsigned int&gt; get_classifications </td>
<td>(</td>
<td class="paramname"></td><td>)</td>
<td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Gets the classification results. </p>
<dl class="section return"><dt>Returns</dt><dd>std::vector containing the classified labels, one per batch element </dd></dl>
<p>Definition at line <a class="el" href="_alex_net_network_8h_source.xhtml#l00422">422</a> of file <a class="el" href="_alex_net_network_8h_source.xhtml">AlexNetNetwork.h</a>.</p>
<div class="fragment"><div class="line"><a name="l00423"></a><span class="lineno"> 423</span>&#160; {</div>
<div class="line"><a name="l00424"></a><span class="lineno"> 424</span>&#160; std::vector&lt;unsigned int&gt; classified_labels;</div>
<div class="line"><a name="l00425"></a><span class="lineno"> 425</span>&#160; Accessor output_accessor(output);</div>
<div class="line"><a name="l00426"></a><span class="lineno"> 426</span>&#160;</div>
<div class="line"><a name="l00427"></a><span class="lineno"> 427</span>&#160; Window window;</div>
<div class="line"><a name="l00428"></a><span class="lineno"> 428</span>&#160; window.set(<a class="code" href="classarm__compute_1_1_window.xhtml#aa96e81276ee4f87ab386cd05a5539a7d">Window::DimX</a>, Window::Dimension(0, 1, 1));</div>
<div class="line"><a name="l00429"></a><span class="lineno"> 429</span>&#160; <span class="keywordflow">for</span>(<span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> d = 1; d &lt; output_accessor.shape().num_dimensions(); ++d)</div>
<div class="line"><a name="l00430"></a><span class="lineno"> 430</span>&#160; {</div>
<div class="line"><a name="l00431"></a><span class="lineno"> 431</span>&#160; window.set(d, Window::Dimension(0, output_accessor.shape()[d], 1));</div>
<div class="line"><a name="l00432"></a><span class="lineno"> 432</span>&#160; }</div>
<div class="line"><a name="l00433"></a><span class="lineno"> 433</span>&#160;</div>
<div class="line"><a name="l00434"></a><span class="lineno"> 434</span>&#160; <a class="code" href="namespacearm__compute.xhtml#a6c0dcc38187027dcb89cd9724bc5a823">execute_window_loop</a>(window, [&amp;](<span class="keyword">const</span> Coordinates &amp; <span class="keywordtype">id</span>)</div>
<div class="line"><a name="l00435"></a><span class="lineno"> 435</span>&#160; {</div>
<div class="line"><a name="l00436"></a><span class="lineno"> 436</span>&#160; <span class="keywordtype">int</span> max_idx = 0;</div>
<div class="line"><a name="l00437"></a><span class="lineno"> 437</span>&#160; <span class="keywordtype">float</span> val = 0;</div>
<div class="line"><a name="l00438"></a><span class="lineno"> 438</span>&#160; <span class="keyword">const</span> <span class="keywordtype">void</span> *<span class="keyword">const</span> out_ptr = output_accessor(<span class="keywordtype">id</span>);</div>
<div class="line"><a name="l00439"></a><span class="lineno"> 439</span>&#160; <span class="keywordflow">for</span>(<span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> l = 0; l &lt; output_accessor.shape().x(); ++l)</div>
<div class="line"><a name="l00440"></a><span class="lineno"> 440</span>&#160; {</div>
<div class="line"><a name="l00441"></a><span class="lineno"> 441</span>&#160; <span class="keywordtype">float</span> curr_val = <span class="keyword">reinterpret_cast&lt;</span><span class="keyword">const </span><span class="keywordtype">float</span> *<span class="keyword">&gt;</span>(out_ptr)[l];</div>
<div class="line"><a name="l00442"></a><span class="lineno"> 442</span>&#160; <span class="keywordflow">if</span>(curr_val &gt; val)</div>
<div class="line"><a name="l00443"></a><span class="lineno"> 443</span>&#160; {</div>
<div class="line"><a name="l00444"></a><span class="lineno"> 444</span>&#160; max_idx = l;</div>
<div class="line"><a name="l00445"></a><span class="lineno"> 445</span>&#160; val = curr_val;</div>
<div class="line"><a name="l00446"></a><span class="lineno"> 446</span>&#160; }</div>
<div class="line"><a name="l00447"></a><span class="lineno"> 447</span>&#160; }</div>
<div class="line"><a name="l00448"></a><span class="lineno"> 448</span>&#160; classified_labels.push_back(max_idx);</div>
<div class="line"><a name="l00449"></a><span class="lineno"> 449</span>&#160; });</div>
<div class="line"><a name="l00450"></a><span class="lineno"> 450</span>&#160; <span class="keywordflow">return</span> classified_labels;</div>
<div class="line"><a name="l00451"></a><span class="lineno"> 451</span>&#160; }</div>
<div class="ttc" id="classarm__compute_1_1_window_xhtml_aa96e81276ee4f87ab386cd05a5539a7d"><div class="ttname"><a href="classarm__compute_1_1_window.xhtml#aa96e81276ee4f87ab386cd05a5539a7d">arm_compute::Window::DimX</a></div><div class="ttdeci">static constexpr size_t DimX</div><div class="ttdoc">Alias for dimension 0 also known as X dimension. </div><div class="ttdef"><b>Definition:</b> <a href="_window_8h_source.xhtml#l00043">Window.h:43</a></div></div>
<div class="ttc" id="namespacearm__compute_xhtml_a6c0dcc38187027dcb89cd9724bc5a823"><div class="ttname"><a href="namespacearm__compute.xhtml#a6c0dcc38187027dcb89cd9724bc5a823">arm_compute::execute_window_loop</a></div><div class="ttdeci">void execute_window_loop(const Window &amp;w, L &amp;&amp;lambda_function, Ts &amp;&amp;...iterators)</div><div class="ttdoc">Iterate through the passed window, automatically adjusting the iterators and calling the lambda_funct...</div><div class="ttdef"><b>Definition:</b> <a href="_helpers_8inl_source.xhtml#l00127">Helpers.inl:127</a></div></div>
</div><!-- fragment -->
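<p>A usage sketch added for illustration, assuming an already set-up instance named <code>network</code>: the returned vector holds the index of the highest-scoring class for each batch element, so it can be consumed directly after <code>run()</code>.</p>
<pre class="fragment">network.run();
const std::vector&lt;unsigned int&gt; labels = network.get_classifications();
for(size_t i = 0; i &lt; labels.size(); ++i)
{
    std::cout &lt;&lt; "batch element " &lt;&lt; i &lt;&lt; " -&gt; class " &lt;&lt; labels[i] &lt;&lt; "\n"; // needs &lt;iostream&gt;
}
</pre>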
</div>
</div>
<a class="anchor" id="ab4d5b5821653f1eeabef922fbe3b9a91"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">void init </td>
<td>(</td>
<td class="paramtype"><a class="el" href="namespacearm__compute.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6">DataType</a>&#160;</td>
<td class="paramname"><em>data_type</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">int&#160;</td>
<td class="paramname"><em>fixed_point_position</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">int&#160;</td>
<td class="paramname"><em>batches</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">bool&#160;</td>
<td class="paramname"><em>reshaped_weights</em> = <code>false</code>&#160;</td>
</tr>
<tr>
<td></td>
<td>)</td>
<td></td><td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Initialises the network: stores the data type, fixed point position, batch count and weights layout, and sets up the tensor info (shapes and sub-tensors) for all weights and biases accordingly. </p>
<p>Definition at line <a class="el" href="_alex_net_network_8h_source.xhtml#l00057">57</a> of file <a class="el" href="_alex_net_network_8h_source.xhtml">AlexNetNetwork.h</a>.</p>
<div class="fragment"><div class="line"><a name="l00058"></a><span class="lineno"> 58</span>&#160; {</div>
<div class="line"><a name="l00059"></a><span class="lineno"> 59</span>&#160; _data_type = <a class="code" href="namespacearm__compute_1_1test_1_1validation.xhtml#ac2ad7f431e3446fddcd9b6b9f93c4c14">data_type</a>;</div>
<div class="line"><a name="l00060"></a><span class="lineno"> 60</span>&#160; _fixed_point_position = fixed_point_position;</div>
<div class="line"><a name="l00061"></a><span class="lineno"> 61</span>&#160; _batches = batches;</div>
<div class="line"><a name="l00062"></a><span class="lineno"> 62</span>&#160; _reshaped_weights = reshaped_weights;</div>
<div class="line"><a name="l00063"></a><span class="lineno"> 63</span>&#160;</div>
<div class="line"><a name="l00064"></a><span class="lineno"> 64</span>&#160; <span class="comment">// Initialize weights and biases</span></div>
<div class="line"><a name="l00065"></a><span class="lineno"> 65</span>&#160; <span class="keywordflow">if</span>(!_reshaped_weights)</div>
<div class="line"><a name="l00066"></a><span class="lineno"> 66</span>&#160; {</div>
<div class="line"><a name="l00067"></a><span class="lineno"> 67</span>&#160; w[0].allocator()-&gt;init(TensorInfo(TensorShape(11U, 11U, 3U, 96U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00068"></a><span class="lineno"> 68</span>&#160; b[0].allocator()-&gt;init(TensorInfo(TensorShape(96U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00069"></a><span class="lineno"> 69</span>&#160; w[1].allocator()-&gt;init(TensorInfo(TensorShape(5U, 5U, 48U, 256U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00070"></a><span class="lineno"> 70</span>&#160; b[1].allocator()-&gt;init(TensorInfo(TensorShape(256U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00071"></a><span class="lineno"> 71</span>&#160; w[2].allocator()-&gt;init(TensorInfo(TensorShape(3U, 3U, 256U, 384U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00072"></a><span class="lineno"> 72</span>&#160; b[2].allocator()-&gt;init(TensorInfo(TensorShape(384U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00073"></a><span class="lineno"> 73</span>&#160; w[3].allocator()-&gt;init(TensorInfo(TensorShape(3U, 3U, 192U, 384U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00074"></a><span class="lineno"> 74</span>&#160; b[3].allocator()-&gt;init(TensorInfo(TensorShape(384U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00075"></a><span class="lineno"> 75</span>&#160; w[4].allocator()-&gt;init(TensorInfo(TensorShape(3U, 3U, 192U, 256U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00076"></a><span class="lineno"> 76</span>&#160; b[4].allocator()-&gt;init(TensorInfo(TensorShape(256U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00077"></a><span class="lineno"> 77</span>&#160; w[5].allocator()-&gt;init(TensorInfo(TensorShape(9216U, 4096U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00078"></a><span class="lineno"> 78</span>&#160; b[5].allocator()-&gt;init(TensorInfo(TensorShape(4096U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00079"></a><span class="lineno"> 79</span>&#160; w[6].allocator()-&gt;init(TensorInfo(TensorShape(4096U, 4096U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00080"></a><span class="lineno"> 80</span>&#160; b[6].allocator()-&gt;init(TensorInfo(TensorShape(4096U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00081"></a><span class="lineno"> 81</span>&#160; w[7].allocator()-&gt;init(TensorInfo(TensorShape(4096U, 1000U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00082"></a><span class="lineno"> 82</span>&#160; b[7].allocator()-&gt;init(TensorInfo(TensorShape(1000U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00083"></a><span class="lineno"> 83</span>&#160;</div>
<div class="line"><a name="l00084"></a><span class="lineno"> 84</span>&#160; w11 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;w[1], TensorShape(5U, 5U, 48U, 128U), Coordinates()));</div>
<div class="line"><a name="l00085"></a><span class="lineno"> 85</span>&#160; w12 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;w[1], TensorShape(5U, 5U, 48U, 128U), Coordinates(0, 0, 0, 128)));</div>
<div class="line"><a name="l00086"></a><span class="lineno"> 86</span>&#160; b11 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;b[1], TensorShape(128U), Coordinates()));</div>
<div class="line"><a name="l00087"></a><span class="lineno"> 87</span>&#160; b12 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;b[1], TensorShape(128U), Coordinates(128)));</div>
<div class="line"><a name="l00088"></a><span class="lineno"> 88</span>&#160;</div>
<div class="line"><a name="l00089"></a><span class="lineno"> 89</span>&#160; w31 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;w[3], TensorShape(3U, 3U, 192U, 192U), Coordinates()));</div>
<div class="line"><a name="l00090"></a><span class="lineno"> 90</span>&#160; w32 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;w[3], TensorShape(3U, 3U, 192U, 192U), Coordinates(0, 0, 0, 192)));</div>
<div class="line"><a name="l00091"></a><span class="lineno"> 91</span>&#160; b31 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;b[3], TensorShape(192U), Coordinates()));</div>
<div class="line"><a name="l00092"></a><span class="lineno"> 92</span>&#160; b32 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;b[3], TensorShape(192U), Coordinates(192)));</div>
<div class="line"><a name="l00093"></a><span class="lineno"> 93</span>&#160;</div>
<div class="line"><a name="l00094"></a><span class="lineno"> 94</span>&#160; w41 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;w[4], TensorShape(3U, 3U, 192U, 128U), Coordinates()));</div>
<div class="line"><a name="l00095"></a><span class="lineno"> 95</span>&#160; w42 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;w[4], TensorShape(3U, 3U, 192U, 128U), Coordinates(0, 0, 0, 128)));</div>
<div class="line"><a name="l00096"></a><span class="lineno"> 96</span>&#160; b41 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;b[4], TensorShape(128U), Coordinates()));</div>
<div class="line"><a name="l00097"></a><span class="lineno"> 97</span>&#160; b42 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;b[4], TensorShape(128U), Coordinates(128)));</div>
<div class="line"><a name="l00098"></a><span class="lineno"> 98</span>&#160; }</div>
<div class="line"><a name="l00099"></a><span class="lineno"> 99</span>&#160; <span class="keywordflow">else</span></div>
<div class="line"><a name="l00100"></a><span class="lineno"> 100</span>&#160; {</div>
<div class="line"><a name="l00101"></a><span class="lineno"> 101</span>&#160; <span class="keyword">auto</span> reshape = [&amp;](<span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> width, <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> height, <span class="keywordtype">bool</span> <a class="code" href="namespacearm__compute_1_1test_1_1validation_1_1reference.xhtml#aeec300028ef21b06bc60da82c894a010">convolution_layer</a>) -&gt; TensorShape</div>
<div class="line"><a name="l00102"></a><span class="lineno"> 102</span>&#160; {</div>
<div class="line"><a name="l00103"></a><span class="lineno"> 103</span>&#160; <span class="keyword">const</span> <span class="keywordtype">bool</span> is_optimised = <a class="code" href="hwc_8hpp.xhtml#a0f61d63b009d0880a89c843bd50d8d76">std::is_same&lt;ITensorType, ITensor&gt;::value</a> &amp;&amp; <a class="code" href="classarm__compute_1_1_scheduler.xhtml#acb4f87f1831680d8d1b70e1bef06bb81">NEScheduler::get</a>().<a class="code" href="classarm__compute_1_1_i_scheduler.xhtml#a671307bb78c1d64435708feead8c04da">cpu_info</a>().<a class="code" href="structarm__compute_1_1_c_p_u_info.xhtml#a6446992b5303784eb509ecaf4fc4f8e6">CPU</a> &gt;= <a class="code" href="namespacearm__compute.xhtml#a5bc939238e1f14a4c39aaee8186a853da45a830a4c6240ac27c392266589a630c">CPUTarget::ARMV7</a> &amp;&amp; <a class="code" href="namespacearm__compute_1_1test_1_1validation.xhtml#ac2ad7f431e3446fddcd9b6b9f93c4c14">data_type</a> == <a class="code" href="namespacearm__compute.xhtml#ab4e88c89b3b7ea1735996cc4def22d58a44ad4ef5a76e6aa6fb3e3fa079a54fda">DataType::F32</a>;</div>
<div class="line"><a name="l00104"></a><span class="lineno"> 104</span>&#160;</div>
<div class="line"><a name="l00105"></a><span class="lineno"> 105</span>&#160; <span class="keywordflow">if</span>(<a class="code" href="namespacearm__compute_1_1test_1_1validation_1_1reference.xhtml#aeec300028ef21b06bc60da82c894a010">convolution_layer</a> &amp;&amp; is_optimised)</div>
<div class="line"><a name="l00106"></a><span class="lineno"> 106</span>&#160; {</div>
<div class="line"><a name="l00107"></a><span class="lineno"> 107</span>&#160; <span class="keywordflow">return</span> TensorShape{ height, width };</div>
<div class="line"><a name="l00108"></a><span class="lineno"> 108</span>&#160; }</div>
<div class="line"><a name="l00109"></a><span class="lineno"> 109</span>&#160; <span class="keywordflow">else</span></div>
<div class="line"><a name="l00110"></a><span class="lineno"> 110</span>&#160; {</div>
<div class="line"><a name="l00111"></a><span class="lineno"> 111</span>&#160; <span class="keyword">const</span> <span class="keywordtype">int</span> interleave_width = 16 / <a class="code" href="namespacearm__compute.xhtml#abb7e0f23a4f2e63f39433f158dad47ab">arm_compute::data_size_from_type</a>(_data_type);</div>
<div class="line"><a name="l00112"></a><span class="lineno"> 112</span>&#160;</div>
<div class="line"><a name="l00113"></a><span class="lineno"> 113</span>&#160; <span class="keywordflow">return</span> TensorShape{ width * interleave_width, <span class="keyword">static_cast&lt;</span><span class="keywordtype">unsigned</span> <span class="keywordtype">int</span><span class="keyword">&gt;</span>(std::ceil(static_cast&lt;float&gt;(height) / interleave_width)) };</div>
<div class="line"><a name="l00114"></a><span class="lineno"> 114</span>&#160; }</div>
<div class="line"><a name="l00115"></a><span class="lineno"> 115</span>&#160; };</div>
<div class="line"><a name="l00116"></a><span class="lineno"> 116</span>&#160;</div>
<div class="line"><a name="l00117"></a><span class="lineno"> 117</span>&#160; <span class="comment">// Create tensor for the reshaped weights</span></div>
<div class="line"><a name="l00118"></a><span class="lineno"> 118</span>&#160; w[0].allocator()-&gt;init(TensorInfo(reshape(366U, 96U, <span class="keyword">true</span>), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00119"></a><span class="lineno"> 119</span>&#160;</div>
<div class="line"><a name="l00120"></a><span class="lineno"> 120</span>&#160; <span class="comment">// Configure the direct convolution&#39;s weights. Direct convolution doesn&#39;t need reshape weights</span></div>
<div class="line"><a name="l00121"></a><span class="lineno"> 121</span>&#160; <span class="keywordflow">if</span>(!_is_direct_conv)</div>
<div class="line"><a name="l00122"></a><span class="lineno"> 122</span>&#160; {</div>
<div class="line"><a name="l00123"></a><span class="lineno"> 123</span>&#160; <span class="keyword">auto</span> w11_tensor = std::unique_ptr&lt;TensorType&gt;(<span class="keyword">new</span> TensorType());</div>
<div class="line"><a name="l00124"></a><span class="lineno"> 124</span>&#160; <span class="keyword">auto</span> w12_tensor = std::unique_ptr&lt;TensorType&gt;(<span class="keyword">new</span> TensorType());</div>
<div class="line"><a name="l00125"></a><span class="lineno"> 125</span>&#160; <span class="keyword">auto</span> w31_tensor = std::unique_ptr&lt;TensorType&gt;(<span class="keyword">new</span> TensorType());</div>
<div class="line"><a name="l00126"></a><span class="lineno"> 126</span>&#160; <span class="keyword">auto</span> w32_tensor = std::unique_ptr&lt;TensorType&gt;(<span class="keyword">new</span> TensorType());</div>
<div class="line"><a name="l00127"></a><span class="lineno"> 127</span>&#160; <span class="keyword">auto</span> w41_tensor = std::unique_ptr&lt;TensorType&gt;(<span class="keyword">new</span> TensorType());</div>
<div class="line"><a name="l00128"></a><span class="lineno"> 128</span>&#160; <span class="keyword">auto</span> w42_tensor = std::unique_ptr&lt;TensorType&gt;(<span class="keyword">new</span> TensorType());</div>
<div class="line"><a name="l00129"></a><span class="lineno"> 129</span>&#160; w11_tensor-&gt;allocator()-&gt;init(TensorInfo(reshape(1248U, 128U, <span class="keyword">true</span>), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00130"></a><span class="lineno"> 130</span>&#160; w12_tensor-&gt;allocator()-&gt;init(TensorInfo(reshape(1248U, 128U, <span class="keyword">true</span>), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00131"></a><span class="lineno"> 131</span>&#160; w31_tensor-&gt;allocator()-&gt;init(TensorInfo(reshape(1920U, 192U, <span class="keyword">true</span>), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00132"></a><span class="lineno"> 132</span>&#160; w32_tensor-&gt;allocator()-&gt;init(TensorInfo(reshape(1920U, 192U, <span class="keyword">true</span>), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00133"></a><span class="lineno"> 133</span>&#160; w41_tensor-&gt;allocator()-&gt;init(TensorInfo(reshape(1920U, 128U, <span class="keyword">true</span>), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00134"></a><span class="lineno"> 134</span>&#160; w42_tensor-&gt;allocator()-&gt;init(TensorInfo(reshape(1920U, 128U, <span class="keyword">true</span>), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00135"></a><span class="lineno"> 135</span>&#160; w[2].allocator()-&gt;init(TensorInfo(reshape(2560U, 384U, <span class="keyword">true</span>), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00136"></a><span class="lineno"> 136</span>&#160; w11 = std::move(w11_tensor);</div>
<div class="line"><a name="l00137"></a><span class="lineno"> 137</span>&#160; w12 = std::move(w12_tensor);</div>
<div class="line"><a name="l00138"></a><span class="lineno"> 138</span>&#160; w31 = std::move(w31_tensor);</div>
<div class="line"><a name="l00139"></a><span class="lineno"> 139</span>&#160; w32 = std::move(w32_tensor);</div>
<div class="line"><a name="l00140"></a><span class="lineno"> 140</span>&#160; w41 = std::move(w41_tensor);</div>
<div class="line"><a name="l00141"></a><span class="lineno"> 141</span>&#160; w42 = std::move(w42_tensor);</div>
<div class="line"><a name="l00142"></a><span class="lineno"> 142</span>&#160; }</div>
<div class="line"><a name="l00143"></a><span class="lineno"> 143</span>&#160; <span class="keywordflow">else</span></div>
<div class="line"><a name="l00144"></a><span class="lineno"> 144</span>&#160; {</div>
<div class="line"><a name="l00145"></a><span class="lineno"> 145</span>&#160; w[1].allocator()-&gt;init(TensorInfo(TensorShape(5U, 5U, 48U, 256U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00146"></a><span class="lineno"> 146</span>&#160; b[1].allocator()-&gt;init(TensorInfo(TensorShape(256U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00147"></a><span class="lineno"> 147</span>&#160; w[2].allocator()-&gt;init(TensorInfo(TensorShape(3U, 3U, 256U, 384U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00148"></a><span class="lineno"> 148</span>&#160; b[2].allocator()-&gt;init(TensorInfo(TensorShape(384U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00149"></a><span class="lineno"> 149</span>&#160; w[3].allocator()-&gt;init(TensorInfo(TensorShape(3U, 3U, 192U, 384U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00150"></a><span class="lineno"> 150</span>&#160; b[3].allocator()-&gt;init(TensorInfo(TensorShape(384U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00151"></a><span class="lineno"> 151</span>&#160; w[4].allocator()-&gt;init(TensorInfo(TensorShape(3U, 3U, 192U, 256U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00152"></a><span class="lineno"> 152</span>&#160; b[4].allocator()-&gt;init(TensorInfo(TensorShape(256U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00153"></a><span class="lineno"> 153</span>&#160; w11 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;w[1], TensorShape(5U, 5U, 48U, 128U), Coordinates()));</div>
<div class="line"><a name="l00154"></a><span class="lineno"> 154</span>&#160; w12 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;w[1], TensorShape(5U, 5U, 48U, 128U), Coordinates(0, 0, 0, 128)));</div>
<div class="line"><a name="l00155"></a><span class="lineno"> 155</span>&#160; b11 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;b[1], TensorShape(128U), Coordinates()));</div>
<div class="line"><a name="l00156"></a><span class="lineno"> 156</span>&#160; b12 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;b[1], TensorShape(128U), Coordinates(128)));</div>
<div class="line"><a name="l00157"></a><span class="lineno"> 157</span>&#160;</div>
<div class="line"><a name="l00158"></a><span class="lineno"> 158</span>&#160; w31 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;w[3], TensorShape(3U, 3U, 192U, 192U), Coordinates()));</div>
<div class="line"><a name="l00159"></a><span class="lineno"> 159</span>&#160; w32 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;w[3], TensorShape(3U, 3U, 192U, 192U), Coordinates(0, 0, 0, 192)));</div>
<div class="line"><a name="l00160"></a><span class="lineno"> 160</span>&#160; b31 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;b[3], TensorShape(192U), Coordinates()));</div>
<div class="line"><a name="l00161"></a><span class="lineno"> 161</span>&#160; b32 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;b[3], TensorShape(192U), Coordinates(192)));</div>
<div class="line"><a name="l00162"></a><span class="lineno"> 162</span>&#160;</div>
<div class="line"><a name="l00163"></a><span class="lineno"> 163</span>&#160; w41 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;w[4], TensorShape(3U, 3U, 192U, 128U), Coordinates()));</div>
<div class="line"><a name="l00164"></a><span class="lineno"> 164</span>&#160; w42 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;w[4], TensorShape(3U, 3U, 192U, 128U), Coordinates(0, 0, 0, 128)));</div>
<div class="line"><a name="l00165"></a><span class="lineno"> 165</span>&#160; b41 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;b[4], TensorShape(128U), Coordinates()));</div>
<div class="line"><a name="l00166"></a><span class="lineno"> 166</span>&#160; b42 = std::unique_ptr&lt;SubTensorType&gt;(<span class="keyword">new</span> SubTensorType(&amp;b[4], TensorShape(128U), Coordinates(128)));</div>
<div class="line"><a name="l00167"></a><span class="lineno"> 167</span>&#160; }</div>
<div class="line"><a name="l00168"></a><span class="lineno"> 168</span>&#160;</div>
<div class="line"><a name="l00169"></a><span class="lineno"> 169</span>&#160; b[5].allocator()-&gt;init(TensorInfo(TensorShape(4096U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00170"></a><span class="lineno"> 170</span>&#160; b[6].allocator()-&gt;init(TensorInfo(TensorShape(4096U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00171"></a><span class="lineno"> 171</span>&#160; b[7].allocator()-&gt;init(TensorInfo(TensorShape(1000U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00172"></a><span class="lineno"> 172</span>&#160;</div>
<div class="line"><a name="l00173"></a><span class="lineno"> 173</span>&#160; <span class="keywordflow">if</span>(_batches &gt; 1 &amp;&amp; <a class="code" href="hwc_8hpp.xhtml#a0f61d63b009d0880a89c843bd50d8d76">std::is_same&lt;TensorType, Tensor&gt;::value</a>)</div>
<div class="line"><a name="l00174"></a><span class="lineno"> 174</span>&#160; {</div>
<div class="line"><a name="l00175"></a><span class="lineno"> 175</span>&#160; w[5].allocator()-&gt;init(TensorInfo(reshape(9216U, 4096U, <span class="keyword">false</span>), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00176"></a><span class="lineno"> 176</span>&#160; w[6].allocator()-&gt;init(TensorInfo(reshape(4096U, 4096U, <span class="keyword">false</span>), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00177"></a><span class="lineno"> 177</span>&#160; w[7].allocator()-&gt;init(TensorInfo(reshape(4096U, 1000U, <span class="keyword">false</span>), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00178"></a><span class="lineno"> 178</span>&#160; }</div>
<div class="line"><a name="l00179"></a><span class="lineno"> 179</span>&#160; <span class="keywordflow">else</span></div>
<div class="line"><a name="l00180"></a><span class="lineno"> 180</span>&#160; {</div>
<div class="line"><a name="l00181"></a><span class="lineno"> 181</span>&#160; w[5].allocator()-&gt;init(TensorInfo(TensorShape(4096U, 9216U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00182"></a><span class="lineno"> 182</span>&#160; w[6].allocator()-&gt;init(TensorInfo(TensorShape(4096U, 4096U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00183"></a><span class="lineno"> 183</span>&#160; w[7].allocator()-&gt;init(TensorInfo(TensorShape(1000U, 4096U), 1, _data_type, _fixed_point_position));</div>
<div class="line"><a name="l00184"></a><span class="lineno"> 184</span>&#160; }</div>
<div class="line"><a name="l00185"></a><span class="lineno"> 185</span>&#160; }</div>
<div class="line"><a name="l00186"></a><span class="lineno"> 186</span>&#160; }</div>
<div class="ttc" id="namespacearm__compute_xhtml_a5bc939238e1f14a4c39aaee8186a853da45a830a4c6240ac27c392266589a630c"><div class="ttname"><a href="namespacearm__compute.xhtml#a5bc939238e1f14a4c39aaee8186a853da45a830a4c6240ac27c392266589a630c">arm_compute::CPUTarget::ARMV7</a></div></div>
<div class="ttc" id="namespacearm__compute_xhtml_ab4e88c89b3b7ea1735996cc4def22d58a44ad4ef5a76e6aa6fb3e3fa079a54fda"><div class="ttname"><a href="namespacearm__compute.xhtml#ab4e88c89b3b7ea1735996cc4def22d58a44ad4ef5a76e6aa6fb3e3fa079a54fda">arm_compute::Format::F32</a></div><div class="ttdoc">1 channel, 1 F16 per channel </div></div>
<div class="ttc" id="namespacearm__compute_1_1test_1_1validation_xhtml_ac2ad7f431e3446fddcd9b6b9f93c4c14"><div class="ttname"><a href="namespacearm__compute_1_1test_1_1validation.xhtml#ac2ad7f431e3446fddcd9b6b9f93c4c14">arm_compute::test::validation::data_type</a></div><div class="ttdeci">data_type</div><div class="ttdef"><b>Definition:</b> <a href="_c_l_2_min_max_location_8cpp_source.xhtml#l00090">MinMaxLocation.cpp:90</a></div></div>
<div class="ttc" id="namespacearm__compute_1_1test_1_1validation_1_1reference_xhtml_aeec300028ef21b06bc60da82c894a010"><div class="ttname"><a href="namespacearm__compute_1_1test_1_1validation_1_1reference.xhtml#aeec300028ef21b06bc60da82c894a010">arm_compute::test::validation::reference::convolution_layer</a></div><div class="ttdeci">SimpleTensor&lt; T &gt; convolution_layer(const SimpleTensor&lt; T &gt; &amp;src, const SimpleTensor&lt; T &gt; &amp;weights, const SimpleTensor&lt; T &gt; &amp;bias, const TensorShape &amp;output_shape, const PadStrideInfo &amp;info)</div><div class="ttdef"><b>Definition:</b> <a href="validation_2_c_p_p_2_convolution_layer_8cpp_source.xhtml#l00137">ConvolutionLayer.cpp:137</a></div></div>
<div class="ttc" id="namespacearm__compute_xhtml_abb7e0f23a4f2e63f39433f158dad47ab"><div class="ttname"><a href="namespacearm__compute.xhtml#abb7e0f23a4f2e63f39433f158dad47ab">arm_compute::data_size_from_type</a></div><div class="ttdeci">size_t data_size_from_type(DataType data_type)</div><div class="ttdoc">The size in bytes of the data type. </div><div class="ttdef"><b>Definition:</b> <a href="arm__compute_2core_2_utils_8h_source.xhtml#l00088">Utils.h:88</a></div></div>
<div class="ttc" id="hwc_8hpp_xhtml_a0f61d63b009d0880a89c843bd50d8d76"><div class="ttname"><a href="hwc_8hpp.xhtml#a0f61d63b009d0880a89c843bd50d8d76">value</a></div><div class="ttdeci">void * value</div><div class="ttdef"><b>Definition:</b> <a href="hwc_8hpp_source.xhtml#l00269">hwc.hpp:269</a></div></div>
<div class="ttc" id="classarm__compute_1_1_i_scheduler_xhtml_a671307bb78c1d64435708feead8c04da"><div class="ttname"><a href="classarm__compute_1_1_i_scheduler.xhtml#a671307bb78c1d64435708feead8c04da">arm_compute::IScheduler::cpu_info</a></div><div class="ttdeci">CPUInfo cpu_info() const </div><div class="ttdoc">Get CPU info. </div></div>
<div class="ttc" id="structarm__compute_1_1_c_p_u_info_xhtml_a6446992b5303784eb509ecaf4fc4f8e6"><div class="ttname"><a href="structarm__compute_1_1_c_p_u_info.xhtml#a6446992b5303784eb509ecaf4fc4f8e6">arm_compute::CPUInfo::CPU</a></div><div class="ttdeci">CPUTarget CPU</div><div class="ttdef"><b>Definition:</b> <a href="_c_p_p_types_8h_source.xhtml#l00053">CPPTypes.h:53</a></div></div>
<div class="ttc" id="classarm__compute_1_1_scheduler_xhtml_acb4f87f1831680d8d1b70e1bef06bb81"><div class="ttname"><a href="classarm__compute_1_1_scheduler.xhtml#acb4f87f1831680d8d1b70e1bef06bb81">arm_compute::Scheduler::get</a></div><div class="ttdeci">static IScheduler &amp; get()</div><div class="ttdoc">Access the scheduler singleton. </div></div>
</div><!-- fragment -->
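<p>A call sketch added for illustration (assumes <code>using namespace arm_compute</code> and a declared <code>network</code> instance): the fixed point position is only relevant for the fixed point data types, and <code>reshaped_weights</code> defaults to <code>false</code>.</p>
<pre class="fragment">// Plain FP32 weights, batch size 1, weights in their original layout:
network.init(DataType::F32, 0 /* fixed_point_position */, 1 /* batches */);

// Same configuration, but declaring that the weights are already stored in the
// reshaped layout set up by the branch above:
network.init(DataType::F32, 0, 1, true /* reshaped_weights */);
</pre>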
</div>
</div>
<a class="anchor" id="a13a43e6d814de94978c515cb084873b1"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">void run </td>
<td>(</td>
<td class="paramname"></td><td>)</td>
<td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Runs the model. </p>
<p>Definition at line <a class="el" href="_alex_net_network_8h_source.xhtml#l00530">530</a> of file <a class="el" href="_alex_net_network_8h_source.xhtml">AlexNetNetwork.h</a>.</p>
<div class="fragment"><div class="line"><a name="l00531"></a><span class="lineno"> 531</span>&#160; {</div>
<div class="line"><a name="l00532"></a><span class="lineno"> 532</span>&#160; <span class="comment">// Layer 1</span></div>
<div class="line"><a name="l00533"></a><span class="lineno"> 533</span>&#160; conv1.run();</div>
<div class="line"><a name="l00534"></a><span class="lineno"> 534</span>&#160; act1.run();</div>
<div class="line"><a name="l00535"></a><span class="lineno"> 535</span>&#160; norm1.run();</div>
<div class="line"><a name="l00536"></a><span class="lineno"> 536</span>&#160; pool1.run();</div>
<div class="line"><a name="l00537"></a><span class="lineno"> 537</span>&#160; <span class="comment">// Layer 2</span></div>
<div class="line"><a name="l00538"></a><span class="lineno"> 538</span>&#160; conv21.run();</div>
<div class="line"><a name="l00539"></a><span class="lineno"> 539</span>&#160; conv22.run();</div>
<div class="line"><a name="l00540"></a><span class="lineno"> 540</span>&#160; act2.run();</div>
<div class="line"><a name="l00541"></a><span class="lineno"> 541</span>&#160; norm2.run();</div>
<div class="line"><a name="l00542"></a><span class="lineno"> 542</span>&#160; pool2.run();</div>
<div class="line"><a name="l00543"></a><span class="lineno"> 543</span>&#160; <span class="comment">// Layer 3</span></div>
<div class="line"><a name="l00544"></a><span class="lineno"> 544</span>&#160; conv3.run();</div>
<div class="line"><a name="l00545"></a><span class="lineno"> 545</span>&#160; act3.run();</div>
<div class="line"><a name="l00546"></a><span class="lineno"> 546</span>&#160; <span class="comment">// Layer 4</span></div>
<div class="line"><a name="l00547"></a><span class="lineno"> 547</span>&#160; conv41.run();</div>
<div class="line"><a name="l00548"></a><span class="lineno"> 548</span>&#160; conv42.run();</div>
<div class="line"><a name="l00549"></a><span class="lineno"> 549</span>&#160; act4.run();</div>
<div class="line"><a name="l00550"></a><span class="lineno"> 550</span>&#160; <span class="comment">// Layer 5</span></div>
<div class="line"><a name="l00551"></a><span class="lineno"> 551</span>&#160; conv51.run();</div>
<div class="line"><a name="l00552"></a><span class="lineno"> 552</span>&#160; conv52.run();</div>
<div class="line"><a name="l00553"></a><span class="lineno"> 553</span>&#160; act5.run();</div>
<div class="line"><a name="l00554"></a><span class="lineno"> 554</span>&#160; pool5.run();</div>
<div class="line"><a name="l00555"></a><span class="lineno"> 555</span>&#160; <span class="comment">// Layer 6</span></div>
<div class="line"><a name="l00556"></a><span class="lineno"> 556</span>&#160; fc6.run();</div>
<div class="line"><a name="l00557"></a><span class="lineno"> 557</span>&#160; act6.run();</div>
<div class="line"><a name="l00558"></a><span class="lineno"> 558</span>&#160; <span class="comment">// Layer 7</span></div>
<div class="line"><a name="l00559"></a><span class="lineno"> 559</span>&#160; fc7.run();</div>
<div class="line"><a name="l00560"></a><span class="lineno"> 560</span>&#160; act7.run();</div>
<div class="line"><a name="l00561"></a><span class="lineno"> 561</span>&#160; <span class="comment">// Layer 8</span></div>
<div class="line"><a name="l00562"></a><span class="lineno"> 562</span>&#160; fc8.run();</div>
<div class="line"><a name="l00563"></a><span class="lineno"> 563</span>&#160; <span class="comment">// Softmax</span></div>
<div class="line"><a name="l00564"></a><span class="lineno"> 564</span>&#160; smx.run();</div>
<div class="line"><a name="l00565"></a><span class="lineno"> 565</span>&#160; }</div>
</div><!-- fragment -->
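<p>Illustrative only: <code>run()</code> executes the configured layer functions in the fixed order shown above, so callers can wrap it in their own timing or validation harness, for example:</p>
<pre class="fragment">#include &lt;chrono&gt;

const auto start = std::chrono::high_resolution_clock::now();
network.run(); // one full forward pass over the current batch ("network" is a set-up instance)
const auto end = std::chrono::high_resolution_clock::now();
const auto elapsed_ms = std::chrono::duration_cast&lt;std::chrono::milliseconds&gt;(end - start).count();
</pre>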
</div>
</div>
<hr/>The documentation for this class was generated from the following file:<ul>
<li>tests/networks/<a class="el" href="_alex_net_network_8h_source.xhtml">AlexNetNetwork.h</a></li>
</ul>
</div><!-- contents -->
</div><!-- doc-content -->
<!-- start footer part -->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
<ul>
<li class="navelem"><a class="el" href="namespacearm__compute.xhtml">arm_compute</a></li><li class="navelem"><a class="el" href="namespacearm__compute_1_1test.xhtml">test</a></li><li class="navelem"><a class="el" href="namespacearm__compute_1_1test_1_1networks.xhtml">networks</a></li><li class="navelem"><a class="el" href="classarm__compute_1_1test_1_1networks_1_1_alex_net_network.xhtml">AlexNetNetwork</a></li>
<li class="footer">Generated on Thu Oct 12 2017 14:26:38 for Compute Library by
<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.6 </li>
</ul>
</div>
</body>
</html>