Enable profiling of the optimizer step in ExecuteNetwork
Signed-off-by: Derek Lamberti <derek.lamberti@arm.com>
Change-Id: I04fb80c967bba4bb377de419bde618c1cbb80075
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index b982df3..0251196 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -450,9 +450,9 @@
options.m_ReduceFp32ToFp16 = params.m_EnableFp16TurboMode;
options.m_ReduceFp32ToBf16 = params.m_EnableBf16TurboMode;
options.m_Debug = params.m_PrintIntermediateLayers;
-
options.m_shapeInferenceMethod = params.m_InferOutputShape ?
armnn::ShapeInferenceMethod::InferAndValidate : armnn::ShapeInferenceMethod::ValidateOnly;
+ options.m_ProfilingEnabled = m_EnableProfiling;
armnn::BackendOptions gpuAcc("GpuAcc",
{