Add new op QUANTIZE

Fix: 113563338
Test: NeuralNetworksTest_static
Change-Id: I98036109c998513595463a7fc5c453f2c8b9acf3
diff --git a/common/CpuExecutor.cpp b/common/CpuExecutor.cpp
index d2c6dbf..d31a78d 100644
--- a/common/CpuExecutor.cpp
+++ b/common/CpuExecutor.cpp
@@ -443,6 +443,24 @@
                                                     input.shape());
             }
         } break;
+        case OperationType::QUANTIZE: {
+            if (!allParametersPresent(1, 1)) {
+                return ANEURALNETWORKS_BAD_DATA;
+            }
+            const RunTimeOperandInfo& input = mOperands[ins[0]];
+            RunTimeOperandInfo& output = mOperands[outs[0]];
+            Shape outShape = output.shape();
+
+            if (!quantizePrepare(input.shape(), &outShape) ||
+                !setInfoAndAllocateIfNeeded(&output, outShape)) {
+                break;
+            }
+            if (input.type == OperandType::TENSOR_FLOAT32) {
+                success = quantizeFloat32ToQuant8(reinterpret_cast<const float*>(input.buffer),
+                                                  reinterpret_cast<uint8_t*>(output.buffer),
+                                                  output.shape());
+            }
+        } break;
         case OperationType::DEPTHWISE_CONV_2D: {
             const size_t inCount = ins.size();
             if ((inCount != 11 && inCount != 8) || !allParametersPresent(inCount, 1)) {
diff --git a/common/OperationsUtils.cpp b/common/OperationsUtils.cpp
index e86b4a5..383706d 100644
--- a/common/OperationsUtils.cpp
+++ b/common/OperationsUtils.cpp
@@ -257,6 +257,23 @@
     return true;
 }
 
+bool quantizePrepare(const Shape& input, Shape* output) {
+    if (input.type != OperandType::TENSOR_FLOAT32) {
+        LOG(ERROR) << "QUANTIZE input must be TENSOR_FLOAT32";
+        return false;
+    }
+    if (output->type != OperandType::TENSOR_QUANT8_ASYMM) {
+        LOG(ERROR) << "QUANTIZE output must be TENSOR_QUANT8_ASYMM";
+        return false;
+    }
+    if (input.dimensions.size() != output->dimensions.size()) {
+        LOG(ERROR) << "QUANTIZE input and output tensors must have the same rank";
+        return false;
+    }
+    output->dimensions = input.dimensions;
+    return true;
+}
+
 bool convPrepare(const Shape& input,
                  const Shape& filter,
                  const Shape& bias,
diff --git a/common/Utils.cpp b/common/Utils.cpp
index 93ba7f1..2aeb40c 100644
--- a/common/Utils.cpp
+++ b/common/Utils.cpp
@@ -541,6 +541,26 @@
                                                  outputCount, outputIndexes,
                                                  outExpectedTypes);
         }
+        case ANEURALNETWORKS_QUANTIZE: {
+            if (inputCount != 1 || outputCount != 1) {
+                logInvalidInOutNumber(1, 1);
+                return ANEURALNETWORKS_BAD_DATA;
+            }
+            auto inputType = operands[inputIndexes[0]].type;
+            std::vector<OperandType> inExpectedTypes;
+            std::vector<OperandType> outExpectedTypes;
+            if (inputType == OperandType::TENSOR_FLOAT32) {
+                inExpectedTypes = {inputType};
+                outExpectedTypes = {OperandType::TENSOR_QUANT8_ASYMM};
+            } else {
+                LOG(ERROR) << "Unsupported input tensor type for operation "
+                           << kOperationNames[opType];
+                return ANEURALNETWORKS_BAD_DATA;
+            }
+            return validateOperationOperandTypes(operands, inputCount, inputIndexes,
+                                                 inExpectedTypes, outputCount, outputIndexes,
+                                                 outExpectedTypes);
+        }
         case ANEURALNETWORKS_DEPTHWISE_CONV_2D: {
             if ((inputCount != 11 && inputCount != 8) || outputCount != 1) {
                 LOG(ERROR) << "Invalid number of input operands ("
diff --git a/common/include/Operations.h b/common/include/Operations.h
index 109e227..c234d1c 100644
--- a/common/include/Operations.h
+++ b/common/include/Operations.h
@@ -62,6 +62,8 @@
                                float* outputData,
                                const Shape& shape);
 
+bool quantizeFloat32ToQuant8(const float* inputData, uint8_t* outputData, const Shape& outputShape);
+
 bool depthwiseConvFloat32(const float* inputData, const Shape& inputShape,
                           const float* filterData, const Shape& filterShape,
                           const float* biasData, const Shape& biasShape,
diff --git a/common/include/OperationsUtils.h b/common/include/OperationsUtils.h
index c912290..65b5497 100644
--- a/common/include/OperationsUtils.h
+++ b/common/include/OperationsUtils.h
@@ -189,6 +189,8 @@
 
 bool dequantizePrepare(const Shape& input, Shape* output);
 
+bool quantizePrepare(const Shape& input, Shape* output);
+
 bool depthwiseConvPrepare(const Shape& input,
                           const Shape& filter,
                           const Shape& bias,
diff --git a/common/operations/SimpleMath.cpp b/common/operations/SimpleMath.cpp
index ca0a4f5..8843a26 100644
--- a/common/operations/SimpleMath.cpp
+++ b/common/operations/SimpleMath.cpp
@@ -299,6 +299,18 @@
     return true;
 }
 
+bool quantizeFloat32ToQuant8(const float* inputData, uint8_t* outputData,
+                             const Shape& outputShape) {
+    NNTRACE_COMP("quantizeFloat32ToQuant8");
+    uint32_t size = getNumberOfElements(outputShape);
+    for (uint32_t i = 0; i < size; ++i) {
+        outputData[i] = static_cast<uint8_t>(std::max<float>(
+                0, std::min<float>(255, outputShape.offset +
+                                                std::round(inputData[i] / outputShape.scale))));
+    }
+    return true;
+}
+
 bool subFloat32(const float* in1, const Shape& shape1,
                 const float* in2, const Shape& shape2,
                 int32_t activation,
diff --git a/runtime/include/NeuralNetworks.h b/runtime/include/NeuralNetworks.h
index 7c5e908..e5b5b48 100644
--- a/runtime/include/NeuralNetworks.h
+++ b/runtime/include/NeuralNetworks.h
@@ -2156,7 +2156,28 @@
      */
     ANEURALNETWORKS_PRELU = 68,
     ANEURALNETWORKS_PRIOR_BOX = 69,
+
+    /**
+     * Quantizes the input tensor.
+     *
+     * The formula is:
+     *
+     *     output = max(0, min(255, round(input / scale) + zeroPoint))
+     *
+     * Supported tensor {@link OperandCode}:
+     * * {@link ANEURALNETWORKS_TENSOR_FLOAT32}
+     *
+     * Inputs:
+     * * 0: A tensor of {@link ANEURALNETWORKS_TENSOR_FLOAT32}.
+     *
+     * Outputs:
+     * * 0: The output tensor of same shape as input0, but with
+     *      {@link ANEURALNETWORKS_TENSOR_QUANT8_ASYMM}.
+     *
+     * Available since API level 29.
+     */
     ANEURALNETWORKS_QUANTIZE = 70,
+
     ANEURALNETWORKS_QUANTIZED_16BIT_LSTM = 71,
     ANEURALNETWORKS_RANDOM_MULTINOMIAL = 72,
     ANEURALNETWORKS_REDUCE = 73,
diff --git a/runtime/test/for-cts/TestGeneratedOneFile.cpp b/runtime/test/for-cts/TestGeneratedOneFile.cpp
index be05f06..4fe3edd 100644
--- a/runtime/test/for-cts/TestGeneratedOneFile.cpp
+++ b/runtime/test/for-cts/TestGeneratedOneFile.cpp
@@ -354,6 +354,7 @@
 #include "../generated/tests/pad_v2_1_float_relaxed.mod.py.cpp"
 #include "../generated/tests/pad_v2_1_quant8.mod.py.cpp"
 #include "../generated/tests/prelu.mod.py.cpp"
+#include "../generated/tests/quantize.mod.py.cpp"
 #include "../generated/tests/roi_align.mod.py.cpp"
 #include "../generated/tests/split_float_1.mod.py.cpp"
 #include "../generated/tests/split_float_2.mod.py.cpp"
diff --git a/runtime/test/generated/all_generated_V1_2_vts_tests.cpp b/runtime/test/generated/all_generated_V1_2_vts_tests.cpp
index 5b1ab30..7b877d7 100644
--- a/runtime/test/generated/all_generated_V1_2_vts_tests.cpp
+++ b/runtime/test/generated/all_generated_V1_2_vts_tests.cpp
@@ -751,6 +751,42 @@
                            prelu::examples_weight_as_input_quant8);
 }
 
+// Generated from: quantize.mod.py.
+namespace quantize {
+// Generated quantize test
+#include "examples/quantize.example.cpp"
+// Generated model constructor
+#include "vts_models/quantize.model.cpp"
+} // namespace quantize
+
+TEST_F(NeuralnetworksHidlTest, quantize_quant8) {
+  generated_tests::Execute(device,
+                           quantize::createTestModel_quant8,
+                           quantize::is_ignored_quant8,
+                           quantize::examples_quant8);
+}
+
+TEST_F(NeuralnetworksHidlTest, quantize_quant8_2) {
+  generated_tests::Execute(device,
+                           quantize::createTestModel_quant8_2,
+                           quantize::is_ignored_quant8_2,
+                           quantize::examples_quant8_2);
+}
+
+TEST_F(NeuralnetworksHidlTest, quantize_quant8_3) {
+  generated_tests::Execute(device,
+                           quantize::createTestModel_quant8_3,
+                           quantize::is_ignored_quant8_3,
+                           quantize::examples_quant8_3);
+}
+
+TEST_F(NeuralnetworksHidlTest, quantize_quant8_4) {
+  generated_tests::Execute(device,
+                           quantize::createTestModel_quant8_4,
+                           quantize::is_ignored_quant8_4,
+                           quantize::examples_quant8_4);
+}
+
 // Generated from: roi_align.mod.py.
 namespace roi_align {
 // Generated roi_align test
diff --git a/runtime/test/generated/examples/quantize.example.cpp b/runtime/test/generated/examples/quantize.example.cpp
new file mode 100644
index 0000000..6af743b
--- /dev/null
+++ b/runtime/test/generated/examples/quantize.example.cpp
@@ -0,0 +1,98 @@
+// clang-format off
+// Generated file (from: quantize.mod.py). Do not edit
+std::vector<MixedTypedExample> examples_quant8 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+  // int -> FLOAT32 map
+  {{0, {-10.0f, -9.933110367892976f, -9.866220735785953f, -9.799331103678929f, -9.732441471571907f, -9.665551839464882f, -9.59866220735786f, -9.531772575250836f, -9.464882943143813f, -9.397993311036789f, -9.331103678929766f, -9.264214046822742f, -9.19732441471572f, -9.130434782608695f, -9.063545150501673f, -8.996655518394649f, -8.929765886287626f, -8.862876254180602f, -8.79598662207358f, -8.729096989966555f, -8.662207357859533f, -8.595317725752508f, -8.528428093645484f, -8.461538461538462f, -8.39464882943144f, -8.327759197324415f, -8.26086956521739f, -8.193979933110368f, -8.127090301003344f, -8.060200668896321f, -7.993311036789297f, -7.926421404682275f, -7.8595317725752505f, -7.792642140468228f, -7.725752508361204f, -7.658862876254181f, -7.591973244147157f, -7.5250836120401345f, -7.45819397993311f, -7.391304347826087f, -7.3244147157190636f, -7.25752508361204f, -7.190635451505017f, -7.1237458193979935f, -7.05685618729097f, -6.989966555183947f, -6.923076923076923f, -6.8561872909699f, -6.789297658862877f, -6.722408026755852f, -6.65551839464883f, -6.588628762541806f, -6.521739130434783f, -6.454849498327759f, -6.387959866220736f, -6.321070234113712f, -6.25418060200669f, -6.187290969899665f, -6.120401337792643f, -6.053511705685619f, -5.986622073578595f, -5.919732441471572f, -5.852842809364549f, -5.785953177257525f, -5.719063545150502f, -5.6521739130434785f, -5.585284280936455f, -5.518394648829432f, -5.451505016722408f, -5.384615384615385f, -5.317725752508362f, -5.250836120401338f, -5.183946488294315f, -5.117056856187292f, -5.050167224080268f, -4.983277591973244f, -4.916387959866221f, -4.849498327759197f, -4.782608695652174f, -4.7157190635451505f, -4.648829431438127f, -4.581939799331104f, -4.51505016722408f, -4.448160535117057f, -4.381270903010034f, -4.31438127090301f, -4.247491638795987f, -4.1806020066889635f, -4.11371237458194f, -4.046822742474917f, -3.9799331103678934f, -3.91304347826087f, -3.8461538461538467f, -3.7792642140468233f, -3.7123745819398f, 
-3.6454849498327766f, -3.5785953177257532f, -3.511705685618729f, -3.4448160535117056f, -3.3779264214046822f, -3.311036789297659f, -3.2441471571906355f, -3.177257525083612f, -3.1103678929765888f, -3.0434782608695654f, -2.976588628762542f, -2.9096989966555187f, -2.8428093645484953f, -2.775919732441472f, -2.7090301003344486f, -2.642140468227425f, -2.575250836120402f, -2.5083612040133785f, -2.441471571906355f, -2.3745819397993317f, -2.3076923076923084f, -2.240802675585285f, -2.1739130434782616f, -2.1070234113712374f, -2.040133779264214f, -1.9732441471571907f, -1.9063545150501682f, -1.839464882943144f, -1.7725752508361214f, -1.7056856187290972f, -1.6387959866220747f, -1.5719063545150505f, -1.505016722408028f, -1.4381270903010037f, -1.3712374581939795f, -1.304347826086957f, -1.2374581939799327f, -1.1705685618729103f, -1.103678929765886f, -1.0367892976588635f, -0.9698996655518393f, -0.9030100334448168f, -0.8361204013377925f, -0.76923076923077f, -0.7023411371237458f, -0.6354515050167233f, -0.5685618729096991f, -0.5016722408026766f, -0.43478260869565233f, -0.36789297658862985f, -0.3010033444816056f, -0.23411371237458312f, -0.16722408026755886f, -0.10033444816053638f, -0.03344481605351213f, 0.03344481605351213f, 0.1003344481605346f, 0.16722408026755886f, 0.23411371237458134f, 0.3010033444816056f, 0.3678929765886281f, 0.43478260869565233f, 0.5016722408026748f, 0.5685618729096991f, 0.6354515050167215f, 0.7023411371237458f, 0.7692307692307683f, 0.8361204013377925f, 0.903010033444815f, 0.9698996655518393f, 1.0367892976588617f, 1.103678929765886f, 1.1705685618729085f, 1.2374581939799327f, 1.3043478260869552f, 1.3712374581939795f, 1.4381270903010037f, 1.5050167224080262f, 1.5719063545150505f, 1.638795986622073f, 1.7056856187290972f, 1.7725752508361197f, 1.839464882943144f, 1.9063545150501664f, 1.9732441471571907f, 2.040133779264213f, 2.1070234113712374f, 2.17391304347826f, 2.240802675585284f, 2.3076923076923066f, 2.374581939799331f, 2.4414715719063533f, 2.5083612040133776f, 
2.5752508361204f, 2.6421404682274243f, 2.709030100334447f, 2.775919732441471f, 2.8428093645484935f, 2.909698996655518f, 2.976588628762542f, 3.0434782608695645f, 3.1103678929765888f, 3.1772575250836113f, 3.2441471571906355f, 3.311036789297658f, 3.3779264214046822f, 3.4448160535117047f, 3.511705685618729f, 3.5785953177257515f, 3.6454849498327757f, 3.712374581939798f, 3.7792642140468224f, 3.846153846153845f, 3.913043478260869f, 3.9799331103678917f, 4.046822742474916f, 4.113712374581938f, 4.180602006688963f, 4.247491638795985f, 4.314381270903009f, 4.381270903010034f, 4.448160535117056f, 4.51505016722408f, 4.581939799331103f, 4.648829431438127f, 4.71571906354515f, 4.782608695652174f, 4.849498327759196f, 4.916387959866221f, 4.983277591973243f, 5.050167224080267f, 5.11705685618729f, 5.183946488294314f, 5.2508361204013365f, 5.317725752508361f, 5.384615384615383f, 5.4515050167224075f, 5.51839464882943f, 5.585284280936454f, 5.652173913043477f, 5.719063545150501f, 5.785953177257525f, 5.852842809364548f, 5.919732441471572f, 5.986622073578594f, 6.053511705685619f, 6.120401337792643f, 6.187290969899664f, 6.254180602006688f, 6.321070234113712f, 6.387959866220736f, 6.454849498327757f, 6.521739130434781f, 6.588628762541806f, 6.65551839464883f, 6.722408026755851f, 6.789297658862875f, 6.856187290969899f, 6.923076923076923f, 6.989966555183944f, 7.056856187290968f, 7.123745819397993f, 7.190635451505017f, 7.257525083612041f, 7.324414715719062f, 7.391304347826086f, 7.45819397993311f, 7.5250836120401345f, 7.591973244147155f, 7.6588628762541795f, 7.725752508361204f, 7.792642140468228f, 7.859531772575249f, 7.926421404682273f, 7.993311036789297f, 8.060200668896321f, 8.127090301003342f, 8.193979933110366f, 8.26086956521739f, 8.327759197324415f, 8.394648829431436f, 8.46153846153846f, 8.528428093645484f, 8.595317725752508f, 8.662207357859533f, 8.729096989966553f, 8.795986622073578f, 8.862876254180602f, 8.929765886287626f, 8.996655518394647f, 9.063545150501671f, 9.130434782608695f, 
9.19732441471572f, 9.26421404682274f, 9.331103678929765f, 9.397993311036789f, 9.464882943143813f, 9.531772575250834f, 9.598662207357858f, 9.665551839464882f, 9.732441471571907f, 9.799331103678927f, 9.866220735785951f, 9.933110367892976f, 10.0f}}},
+  // int -> INT32 map
+  {},
+  // int -> QUANT8_ASYMM map
+  {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+  // int -> FLOAT32 map
+  {},
+  // int -> INT32 map
+  {},
+  // int -> QUANT8_ASYMM map
+  {{0, {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10}}}
+}
+}, // End of an example
+};
+
+std::vector<MixedTypedExample> examples_quant8_2 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+  // int -> FLOAT32 map
+  {{0, {-10.0f, -9.933110367892976f, -9.866220735785953f, -9.799331103678929f, -9.732441471571907f, -9.665551839464882f, -9.59866220735786f, -9.531772575250836f, -9.464882943143813f, -9.397993311036789f, -9.331103678929766f, -9.264214046822742f, -9.19732441471572f, -9.130434782608695f, -9.063545150501673f, -8.996655518394649f, -8.929765886287626f, -8.862876254180602f, -8.79598662207358f, -8.729096989966555f, -8.662207357859533f, -8.595317725752508f, -8.528428093645484f, -8.461538461538462f, -8.39464882943144f, -8.327759197324415f, -8.26086956521739f, -8.193979933110368f, -8.127090301003344f, -8.060200668896321f, -7.993311036789297f, -7.926421404682275f, -7.8595317725752505f, -7.792642140468228f, -7.725752508361204f, -7.658862876254181f, -7.591973244147157f, -7.5250836120401345f, -7.45819397993311f, -7.391304347826087f, -7.3244147157190636f, -7.25752508361204f, -7.190635451505017f, -7.1237458193979935f, -7.05685618729097f, -6.989966555183947f, -6.923076923076923f, -6.8561872909699f, -6.789297658862877f, -6.722408026755852f, -6.65551839464883f, -6.588628762541806f, -6.521739130434783f, -6.454849498327759f, -6.387959866220736f, -6.321070234113712f, -6.25418060200669f, -6.187290969899665f, -6.120401337792643f, -6.053511705685619f, -5.986622073578595f, -5.919732441471572f, -5.852842809364549f, -5.785953177257525f, -5.719063545150502f, -5.6521739130434785f, -5.585284280936455f, -5.518394648829432f, -5.451505016722408f, -5.384615384615385f, -5.317725752508362f, -5.250836120401338f, -5.183946488294315f, -5.117056856187292f, -5.050167224080268f, -4.983277591973244f, -4.916387959866221f, -4.849498327759197f, -4.782608695652174f, -4.7157190635451505f, -4.648829431438127f, -4.581939799331104f, -4.51505016722408f, -4.448160535117057f, -4.381270903010034f, -4.31438127090301f, -4.247491638795987f, -4.1806020066889635f, -4.11371237458194f, -4.046822742474917f, -3.9799331103678934f, -3.91304347826087f, -3.8461538461538467f, -3.7792642140468233f, -3.7123745819398f, 
-3.6454849498327766f, -3.5785953177257532f, -3.511705685618729f, -3.4448160535117056f, -3.3779264214046822f, -3.311036789297659f, -3.2441471571906355f, -3.177257525083612f, -3.1103678929765888f, -3.0434782608695654f, -2.976588628762542f, -2.9096989966555187f, -2.8428093645484953f, -2.775919732441472f, -2.7090301003344486f, -2.642140468227425f, -2.575250836120402f, -2.5083612040133785f, -2.441471571906355f, -2.3745819397993317f, -2.3076923076923084f, -2.240802675585285f, -2.1739130434782616f, -2.1070234113712374f, -2.040133779264214f, -1.9732441471571907f, -1.9063545150501682f, -1.839464882943144f, -1.7725752508361214f, -1.7056856187290972f, -1.6387959866220747f, -1.5719063545150505f, -1.505016722408028f, -1.4381270903010037f, -1.3712374581939795f, -1.304347826086957f, -1.2374581939799327f, -1.1705685618729103f, -1.103678929765886f, -1.0367892976588635f, -0.9698996655518393f, -0.9030100334448168f, -0.8361204013377925f, -0.76923076923077f, -0.7023411371237458f, -0.6354515050167233f, -0.5685618729096991f, -0.5016722408026766f, -0.43478260869565233f, -0.36789297658862985f, -0.3010033444816056f, -0.23411371237458312f, -0.16722408026755886f, -0.10033444816053638f, -0.03344481605351213f, 0.03344481605351213f, 0.1003344481605346f, 0.16722408026755886f, 0.23411371237458134f, 0.3010033444816056f, 0.3678929765886281f, 0.43478260869565233f, 0.5016722408026748f, 0.5685618729096991f, 0.6354515050167215f, 0.7023411371237458f, 0.7692307692307683f, 0.8361204013377925f, 0.903010033444815f, 0.9698996655518393f, 1.0367892976588617f, 1.103678929765886f, 1.1705685618729085f, 1.2374581939799327f, 1.3043478260869552f, 1.3712374581939795f, 1.4381270903010037f, 1.5050167224080262f, 1.5719063545150505f, 1.638795986622073f, 1.7056856187290972f, 1.7725752508361197f, 1.839464882943144f, 1.9063545150501664f, 1.9732441471571907f, 2.040133779264213f, 2.1070234113712374f, 2.17391304347826f, 2.240802675585284f, 2.3076923076923066f, 2.374581939799331f, 2.4414715719063533f, 2.5083612040133776f, 
2.5752508361204f, 2.6421404682274243f, 2.709030100334447f, 2.775919732441471f, 2.8428093645484935f, 2.909698996655518f, 2.976588628762542f, 3.0434782608695645f, 3.1103678929765888f, 3.1772575250836113f, 3.2441471571906355f, 3.311036789297658f, 3.3779264214046822f, 3.4448160535117047f, 3.511705685618729f, 3.5785953177257515f, 3.6454849498327757f, 3.712374581939798f, 3.7792642140468224f, 3.846153846153845f, 3.913043478260869f, 3.9799331103678917f, 4.046822742474916f, 4.113712374581938f, 4.180602006688963f, 4.247491638795985f, 4.314381270903009f, 4.381270903010034f, 4.448160535117056f, 4.51505016722408f, 4.581939799331103f, 4.648829431438127f, 4.71571906354515f, 4.782608695652174f, 4.849498327759196f, 4.916387959866221f, 4.983277591973243f, 5.050167224080267f, 5.11705685618729f, 5.183946488294314f, 5.2508361204013365f, 5.317725752508361f, 5.384615384615383f, 5.4515050167224075f, 5.51839464882943f, 5.585284280936454f, 5.652173913043477f, 5.719063545150501f, 5.785953177257525f, 5.852842809364548f, 5.919732441471572f, 5.986622073578594f, 6.053511705685619f, 6.120401337792643f, 6.187290969899664f, 6.254180602006688f, 6.321070234113712f, 6.387959866220736f, 6.454849498327757f, 6.521739130434781f, 6.588628762541806f, 6.65551839464883f, 6.722408026755851f, 6.789297658862875f, 6.856187290969899f, 6.923076923076923f, 6.989966555183944f, 7.056856187290968f, 7.123745819397993f, 7.190635451505017f, 7.257525083612041f, 7.324414715719062f, 7.391304347826086f, 7.45819397993311f, 7.5250836120401345f, 7.591973244147155f, 7.6588628762541795f, 7.725752508361204f, 7.792642140468228f, 7.859531772575249f, 7.926421404682273f, 7.993311036789297f, 8.060200668896321f, 8.127090301003342f, 8.193979933110366f, 8.26086956521739f, 8.327759197324415f, 8.394648829431436f, 8.46153846153846f, 8.528428093645484f, 8.595317725752508f, 8.662207357859533f, 8.729096989966553f, 8.795986622073578f, 8.862876254180602f, 8.929765886287626f, 8.996655518394647f, 9.063545150501671f, 9.130434782608695f, 
9.19732441471572f, 9.26421404682274f, 9.331103678929765f, 9.397993311036789f, 9.464882943143813f, 9.531772575250834f, 9.598662207357858f, 9.665551839464882f, 9.732441471571907f, 9.799331103678927f, 9.866220735785951f, 9.933110367892976f, 10.0f}}},
+  // int -> INT32 map
+  {},
+  // int -> QUANT8_ASYMM map
+  {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+  // int -> FLOAT32 map
+  {},
+  // int -> INT32 map
+  {},
+  // int -> QUANT8_ASYMM map
+  {{0, {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11}}}
+}
+}, // End of an example
+};
+
+std::vector<MixedTypedExample> examples_quant8_3 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+  // int -> FLOAT32 map
+  {{0, {-10.0f, -9.933110367892976f, -9.866220735785953f, -9.799331103678929f, -9.732441471571907f, -9.665551839464882f, -9.59866220735786f, -9.531772575250836f, -9.464882943143813f, -9.397993311036789f, -9.331103678929766f, -9.264214046822742f, -9.19732441471572f, -9.130434782608695f, -9.063545150501673f, -8.996655518394649f, -8.929765886287626f, -8.862876254180602f, -8.79598662207358f, -8.729096989966555f, -8.662207357859533f, -8.595317725752508f, -8.528428093645484f, -8.461538461538462f, -8.39464882943144f, -8.327759197324415f, -8.26086956521739f, -8.193979933110368f, -8.127090301003344f, -8.060200668896321f, -7.993311036789297f, -7.926421404682275f, -7.8595317725752505f, -7.792642140468228f, -7.725752508361204f, -7.658862876254181f, -7.591973244147157f, -7.5250836120401345f, -7.45819397993311f, -7.391304347826087f, -7.3244147157190636f, -7.25752508361204f, -7.190635451505017f, -7.1237458193979935f, -7.05685618729097f, -6.989966555183947f, -6.923076923076923f, -6.8561872909699f, -6.789297658862877f, -6.722408026755852f, -6.65551839464883f, -6.588628762541806f, -6.521739130434783f, -6.454849498327759f, -6.387959866220736f, -6.321070234113712f, -6.25418060200669f, -6.187290969899665f, -6.120401337792643f, -6.053511705685619f, -5.986622073578595f, -5.919732441471572f, -5.852842809364549f, -5.785953177257525f, -5.719063545150502f, -5.6521739130434785f, -5.585284280936455f, -5.518394648829432f, -5.451505016722408f, -5.384615384615385f, -5.317725752508362f, -5.250836120401338f, -5.183946488294315f, -5.117056856187292f, -5.050167224080268f, -4.983277591973244f, -4.916387959866221f, -4.849498327759197f, -4.782608695652174f, -4.7157190635451505f, -4.648829431438127f, -4.581939799331104f, -4.51505016722408f, -4.448160535117057f, -4.381270903010034f, -4.31438127090301f, -4.247491638795987f, -4.1806020066889635f, -4.11371237458194f, -4.046822742474917f, -3.9799331103678934f, -3.91304347826087f, -3.8461538461538467f, -3.7792642140468233f, -3.7123745819398f, 
-3.6454849498327766f, -3.5785953177257532f, -3.511705685618729f, -3.4448160535117056f, -3.3779264214046822f, -3.311036789297659f, -3.2441471571906355f, -3.177257525083612f, -3.1103678929765888f, -3.0434782608695654f, -2.976588628762542f, -2.9096989966555187f, -2.8428093645484953f, -2.775919732441472f, -2.7090301003344486f, -2.642140468227425f, -2.575250836120402f, -2.5083612040133785f, -2.441471571906355f, -2.3745819397993317f, -2.3076923076923084f, -2.240802675585285f, -2.1739130434782616f, -2.1070234113712374f, -2.040133779264214f, -1.9732441471571907f, -1.9063545150501682f, -1.839464882943144f, -1.7725752508361214f, -1.7056856187290972f, -1.6387959866220747f, -1.5719063545150505f, -1.505016722408028f, -1.4381270903010037f, -1.3712374581939795f, -1.304347826086957f, -1.2374581939799327f, -1.1705685618729103f, -1.103678929765886f, -1.0367892976588635f, -0.9698996655518393f, -0.9030100334448168f, -0.8361204013377925f, -0.76923076923077f, -0.7023411371237458f, -0.6354515050167233f, -0.5685618729096991f, -0.5016722408026766f, -0.43478260869565233f, -0.36789297658862985f, -0.3010033444816056f, -0.23411371237458312f, -0.16722408026755886f, -0.10033444816053638f, -0.03344481605351213f, 0.03344481605351213f, 0.1003344481605346f, 0.16722408026755886f, 0.23411371237458134f, 0.3010033444816056f, 0.3678929765886281f, 0.43478260869565233f, 0.5016722408026748f, 0.5685618729096991f, 0.6354515050167215f, 0.7023411371237458f, 0.7692307692307683f, 0.8361204013377925f, 0.903010033444815f, 0.9698996655518393f, 1.0367892976588617f, 1.103678929765886f, 1.1705685618729085f, 1.2374581939799327f, 1.3043478260869552f, 1.3712374581939795f, 1.4381270903010037f, 1.5050167224080262f, 1.5719063545150505f, 1.638795986622073f, 1.7056856187290972f, 1.7725752508361197f, 1.839464882943144f, 1.9063545150501664f, 1.9732441471571907f, 2.040133779264213f, 2.1070234113712374f, 2.17391304347826f, 2.240802675585284f, 2.3076923076923066f, 2.374581939799331f, 2.4414715719063533f, 2.5083612040133776f, 
2.5752508361204f, 2.6421404682274243f, 2.709030100334447f, 2.775919732441471f, 2.8428093645484935f, 2.909698996655518f, 2.976588628762542f, 3.0434782608695645f, 3.1103678929765888f, 3.1772575250836113f, 3.2441471571906355f, 3.311036789297658f, 3.3779264214046822f, 3.4448160535117047f, 3.511705685618729f, 3.5785953177257515f, 3.6454849498327757f, 3.712374581939798f, 3.7792642140468224f, 3.846153846153845f, 3.913043478260869f, 3.9799331103678917f, 4.046822742474916f, 4.113712374581938f, 4.180602006688963f, 4.247491638795985f, 4.314381270903009f, 4.381270903010034f, 4.448160535117056f, 4.51505016722408f, 4.581939799331103f, 4.648829431438127f, 4.71571906354515f, 4.782608695652174f, 4.849498327759196f, 4.916387959866221f, 4.983277591973243f, 5.050167224080267f, 5.11705685618729f, 5.183946488294314f, 5.2508361204013365f, 5.317725752508361f, 5.384615384615383f, 5.4515050167224075f, 5.51839464882943f, 5.585284280936454f, 5.652173913043477f, 5.719063545150501f, 5.785953177257525f, 5.852842809364548f, 5.919732441471572f, 5.986622073578594f, 6.053511705685619f, 6.120401337792643f, 6.187290969899664f, 6.254180602006688f, 6.321070234113712f, 6.387959866220736f, 6.454849498327757f, 6.521739130434781f, 6.588628762541806f, 6.65551839464883f, 6.722408026755851f, 6.789297658862875f, 6.856187290969899f, 6.923076923076923f, 6.989966555183944f, 7.056856187290968f, 7.123745819397993f, 7.190635451505017f, 7.257525083612041f, 7.324414715719062f, 7.391304347826086f, 7.45819397993311f, 7.5250836120401345f, 7.591973244147155f, 7.6588628762541795f, 7.725752508361204f, 7.792642140468228f, 7.859531772575249f, 7.926421404682273f, 7.993311036789297f, 8.060200668896321f, 8.127090301003342f, 8.193979933110366f, 8.26086956521739f, 8.327759197324415f, 8.394648829431436f, 8.46153846153846f, 8.528428093645484f, 8.595317725752508f, 8.662207357859533f, 8.729096989966553f, 8.795986622073578f, 8.862876254180602f, 8.929765886287626f, 8.996655518394647f, 9.063545150501671f, 9.130434782608695f, 
9.19732441471572f, 9.26421404682274f, 9.331103678929765f, 9.397993311036789f, 9.464882943143813f, 9.531772575250834f, 9.598662207357858f, 9.665551839464882f, 9.732441471571907f, 9.799331103678927f, 9.866220735785951f, 9.933110367892976f, 10.0f}}},
+  // int -> INT32 map
+  {},
+  // int -> QUANT8_ASYMM map
+  {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+  // int -> FLOAT32 map
+  {},
+  // int -> INT32 map
+  {},
+  // int -> QUANT8_ASYMM map
+  {{0, {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 10, 16, 23, 30, 36, 43, 50, 56, 63, 70, 77, 83, 90, 97, 103, 110, 117, 123, 130, 137, 143, 150, 157, 163, 170, 177, 184, 190, 197, 204, 210, 217, 224, 230, 237, 244, 250, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}}}
+}
+}, // End of an example
+};
+
+std::vector<MixedTypedExample> examples_quant8_4 = {
+// Begin of an example
+{
+//Input(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+  // int -> FLOAT32 map
+  {{0, {-10.0f, -9.933110367892976f, -9.866220735785953f, -9.799331103678929f, -9.732441471571907f, -9.665551839464882f, -9.59866220735786f, -9.531772575250836f, -9.464882943143813f, -9.397993311036789f, -9.331103678929766f, -9.264214046822742f, -9.19732441471572f, -9.130434782608695f, -9.063545150501673f, -8.996655518394649f, -8.929765886287626f, -8.862876254180602f, -8.79598662207358f, -8.729096989966555f, -8.662207357859533f, -8.595317725752508f, -8.528428093645484f, -8.461538461538462f, -8.39464882943144f, -8.327759197324415f, -8.26086956521739f, -8.193979933110368f, -8.127090301003344f, -8.060200668896321f, -7.993311036789297f, -7.926421404682275f, -7.8595317725752505f, -7.792642140468228f, -7.725752508361204f, -7.658862876254181f, -7.591973244147157f, -7.5250836120401345f, -7.45819397993311f, -7.391304347826087f, -7.3244147157190636f, -7.25752508361204f, -7.190635451505017f, -7.1237458193979935f, -7.05685618729097f, -6.989966555183947f, -6.923076923076923f, -6.8561872909699f, -6.789297658862877f, -6.722408026755852f, -6.65551839464883f, -6.588628762541806f, -6.521739130434783f, -6.454849498327759f, -6.387959866220736f, -6.321070234113712f, -6.25418060200669f, -6.187290969899665f, -6.120401337792643f, -6.053511705685619f, -5.986622073578595f, -5.919732441471572f, -5.852842809364549f, -5.785953177257525f, -5.719063545150502f, -5.6521739130434785f, -5.585284280936455f, -5.518394648829432f, -5.451505016722408f, -5.384615384615385f, -5.317725752508362f, -5.250836120401338f, -5.183946488294315f, -5.117056856187292f, -5.050167224080268f, -4.983277591973244f, -4.916387959866221f, -4.849498327759197f, -4.782608695652174f, -4.7157190635451505f, -4.648829431438127f, -4.581939799331104f, -4.51505016722408f, -4.448160535117057f, -4.381270903010034f, -4.31438127090301f, -4.247491638795987f, -4.1806020066889635f, -4.11371237458194f, -4.046822742474917f, -3.9799331103678934f, -3.91304347826087f, -3.8461538461538467f, -3.7792642140468233f, -3.7123745819398f, 
-3.6454849498327766f, -3.5785953177257532f, -3.511705685618729f, -3.4448160535117056f, -3.3779264214046822f, -3.311036789297659f, -3.2441471571906355f, -3.177257525083612f, -3.1103678929765888f, -3.0434782608695654f, -2.976588628762542f, -2.9096989966555187f, -2.8428093645484953f, -2.775919732441472f, -2.7090301003344486f, -2.642140468227425f, -2.575250836120402f, -2.5083612040133785f, -2.441471571906355f, -2.3745819397993317f, -2.3076923076923084f, -2.240802675585285f, -2.1739130434782616f, -2.1070234113712374f, -2.040133779264214f, -1.9732441471571907f, -1.9063545150501682f, -1.839464882943144f, -1.7725752508361214f, -1.7056856187290972f, -1.6387959866220747f, -1.5719063545150505f, -1.505016722408028f, -1.4381270903010037f, -1.3712374581939795f, -1.304347826086957f, -1.2374581939799327f, -1.1705685618729103f, -1.103678929765886f, -1.0367892976588635f, -0.9698996655518393f, -0.9030100334448168f, -0.8361204013377925f, -0.76923076923077f, -0.7023411371237458f, -0.6354515050167233f, -0.5685618729096991f, -0.5016722408026766f, -0.43478260869565233f, -0.36789297658862985f, -0.3010033444816056f, -0.23411371237458312f, -0.16722408026755886f, -0.10033444816053638f, -0.03344481605351213f, 0.03344481605351213f, 0.1003344481605346f, 0.16722408026755886f, 0.23411371237458134f, 0.3010033444816056f, 0.3678929765886281f, 0.43478260869565233f, 0.5016722408026748f, 0.5685618729096991f, 0.6354515050167215f, 0.7023411371237458f, 0.7692307692307683f, 0.8361204013377925f, 0.903010033444815f, 0.9698996655518393f, 1.0367892976588617f, 1.103678929765886f, 1.1705685618729085f, 1.2374581939799327f, 1.3043478260869552f, 1.3712374581939795f, 1.4381270903010037f, 1.5050167224080262f, 1.5719063545150505f, 1.638795986622073f, 1.7056856187290972f, 1.7725752508361197f, 1.839464882943144f, 1.9063545150501664f, 1.9732441471571907f, 2.040133779264213f, 2.1070234113712374f, 2.17391304347826f, 2.240802675585284f, 2.3076923076923066f, 2.374581939799331f, 2.4414715719063533f, 2.5083612040133776f, 
2.5752508361204f, 2.6421404682274243f, 2.709030100334447f, 2.775919732441471f, 2.8428093645484935f, 2.909698996655518f, 2.976588628762542f, 3.0434782608695645f, 3.1103678929765888f, 3.1772575250836113f, 3.2441471571906355f, 3.311036789297658f, 3.3779264214046822f, 3.4448160535117047f, 3.511705685618729f, 3.5785953177257515f, 3.6454849498327757f, 3.712374581939798f, 3.7792642140468224f, 3.846153846153845f, 3.913043478260869f, 3.9799331103678917f, 4.046822742474916f, 4.113712374581938f, 4.180602006688963f, 4.247491638795985f, 4.314381270903009f, 4.381270903010034f, 4.448160535117056f, 4.51505016722408f, 4.581939799331103f, 4.648829431438127f, 4.71571906354515f, 4.782608695652174f, 4.849498327759196f, 4.916387959866221f, 4.983277591973243f, 5.050167224080267f, 5.11705685618729f, 5.183946488294314f, 5.2508361204013365f, 5.317725752508361f, 5.384615384615383f, 5.4515050167224075f, 5.51839464882943f, 5.585284280936454f, 5.652173913043477f, 5.719063545150501f, 5.785953177257525f, 5.852842809364548f, 5.919732441471572f, 5.986622073578594f, 6.053511705685619f, 6.120401337792643f, 6.187290969899664f, 6.254180602006688f, 6.321070234113712f, 6.387959866220736f, 6.454849498327757f, 6.521739130434781f, 6.588628762541806f, 6.65551839464883f, 6.722408026755851f, 6.789297658862875f, 6.856187290969899f, 6.923076923076923f, 6.989966555183944f, 7.056856187290968f, 7.123745819397993f, 7.190635451505017f, 7.257525083612041f, 7.324414715719062f, 7.391304347826086f, 7.45819397993311f, 7.5250836120401345f, 7.591973244147155f, 7.6588628762541795f, 7.725752508361204f, 7.792642140468228f, 7.859531772575249f, 7.926421404682273f, 7.993311036789297f, 8.060200668896321f, 8.127090301003342f, 8.193979933110366f, 8.26086956521739f, 8.327759197324415f, 8.394648829431436f, 8.46153846153846f, 8.528428093645484f, 8.595317725752508f, 8.662207357859533f, 8.729096989966553f, 8.795986622073578f, 8.862876254180602f, 8.929765886287626f, 8.996655518394647f, 9.063545150501671f, 9.130434782608695f, 
9.19732441471572f, 9.26421404682274f, 9.331103678929765f, 9.397993311036789f, 9.464882943143813f, 9.531772575250834f, 9.598662207357858f, 9.665551839464882f, 9.732441471571907f, 9.799331103678927f, 9.866220735785951f, 9.933110367892976f, 10.0f}}},
+  // int -> INT32 map
+  {},
+  // int -> QUANT8_ASYMM map
+  {}
+},
+//Output(s)
+{ // See tools/test_generator/include/TestHarness.h:MixedTyped
+  // int -> FLOAT32 map
+  {},
+  // int -> INT32 map
+  {},
+  // int -> QUANT8_ASYMM map
+  {{0, {119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121}}}
+}
+}, // End of an example
+};
+
diff --git a/runtime/test/generated/models/quantize.model.cpp b/runtime/test/generated/models/quantize.model.cpp
new file mode 100644
index 0000000..0672f07
--- /dev/null
+++ b/runtime/test/generated/models/quantize.model.cpp
@@ -0,0 +1,88 @@
+// clang-format off
+// Generated file (from: quantize.mod.py). Do not edit
+void CreateModel_quant8(Model *model) {
+  OperandType type0(Type::TENSOR_FLOAT32, {300});
+  OperandType type1(Type::TENSOR_QUANT8_ASYMM, {300}, 1.0f, 0);
+  // Phase 1, operands
+  auto input0 = model->addOperand(&type0);
+  auto output0 = model->addOperand(&type1);
+  // Phase 2, operations
+  model->addOperation(ANEURALNETWORKS_QUANTIZE, {input0}, {output0});
+  // Phase 3, inputs and outputs
+  model->identifyInputsAndOutputs(
+    {input0},
+    {output0});
+  assert(model->isValid());
+}
+
+bool is_ignored_quant8(int i) {
+  static std::set<int> ignore = {};
+  return ignore.find(i) != ignore.end();
+}
+
+void CreateModel_quant8_2(Model *model) {
+  OperandType type0(Type::TENSOR_FLOAT32, {300});
+  OperandType type1(Type::TENSOR_QUANT8_ASYMM, {300}, 1.0f, 0);
+  OperandType type2(Type::TENSOR_QUANT8_ASYMM, {300}, 1.0f, 1);
+  // Phase 1, operands
+  auto input01 = model->addOperand(&type0);
+  auto output01 = model->addOperand(&type2);
+  // Phase 2, operations
+  model->addOperation(ANEURALNETWORKS_QUANTIZE, {input01}, {output01});
+  // Phase 3, inputs and outputs
+  model->identifyInputsAndOutputs(
+    {input01},
+    {output01});
+  assert(model->isValid());
+}
+
+bool is_ignored_quant8_2(int i) {
+  static std::set<int> ignore = {};
+  return ignore.find(i) != ignore.end();
+}
+
+void CreateModel_quant8_3(Model *model) {
+  OperandType type0(Type::TENSOR_FLOAT32, {300});
+  OperandType type1(Type::TENSOR_QUANT8_ASYMM, {300}, 1.0f, 0);
+  OperandType type2(Type::TENSOR_QUANT8_ASYMM, {300}, 1.0f, 1);
+  OperandType type3(Type::TENSOR_QUANT8_ASYMM, {300}, 0.01f, 120);
+  // Phase 1, operands
+  auto input02 = model->addOperand(&type0);
+  auto output02 = model->addOperand(&type3);
+  // Phase 2, operations
+  model->addOperation(ANEURALNETWORKS_QUANTIZE, {input02}, {output02});
+  // Phase 3, inputs and outputs
+  model->identifyInputsAndOutputs(
+    {input02},
+    {output02});
+  assert(model->isValid());
+}
+
+bool is_ignored_quant8_3(int i) {
+  static std::set<int> ignore = {};
+  return ignore.find(i) != ignore.end();
+}
+
+void CreateModel_quant8_4(Model *model) {
+  OperandType type0(Type::TENSOR_FLOAT32, {300});
+  OperandType type1(Type::TENSOR_QUANT8_ASYMM, {300}, 1.0f, 0);
+  OperandType type2(Type::TENSOR_QUANT8_ASYMM, {300}, 1.0f, 1);
+  OperandType type3(Type::TENSOR_QUANT8_ASYMM, {300}, 0.01f, 120);
+  OperandType type4(Type::TENSOR_QUANT8_ASYMM, {300}, 10.0f, 120);
+  // Phase 1, operands
+  auto input03 = model->addOperand(&type0);
+  auto output03 = model->addOperand(&type4);
+  // Phase 2, operations
+  model->addOperation(ANEURALNETWORKS_QUANTIZE, {input03}, {output03});
+  // Phase 3, inputs and outputs
+  model->identifyInputsAndOutputs(
+    {input03},
+    {output03});
+  assert(model->isValid());
+}
+
+bool is_ignored_quant8_4(int i) {
+  static std::set<int> ignore = {};
+  return ignore.find(i) != ignore.end();
+}
+
diff --git a/runtime/test/generated/tests/quantize.mod.py.cpp b/runtime/test/generated/tests/quantize.mod.py.cpp
new file mode 100644
index 0000000..c3ec212
--- /dev/null
+++ b/runtime/test/generated/tests/quantize.mod.py.cpp
@@ -0,0 +1,35 @@
+// clang-format off
+// Generated file (from: quantize.mod.py). Do not edit
+#include "../../TestGenerated.h"
+
+namespace quantize {
+// Generated quantize test
+#include "generated/examples/quantize.example.cpp"
+// Generated model constructor
+#include "generated/models/quantize.model.cpp"
+} // namespace quantize
+
+TEST_F(GeneratedTests, quantize_quant8) {
+    execute(quantize::CreateModel_quant8,
+            quantize::is_ignored_quant8,
+            quantize::examples_quant8);
+}
+
+TEST_F(GeneratedTests, quantize_quant8_2) {
+    execute(quantize::CreateModel_quant8_2,
+            quantize::is_ignored_quant8_2,
+            quantize::examples_quant8_2);
+}
+
+TEST_F(GeneratedTests, quantize_quant8_3) {
+    execute(quantize::CreateModel_quant8_3,
+            quantize::is_ignored_quant8_3,
+            quantize::examples_quant8_3);
+}
+
+TEST_F(GeneratedTests, quantize_quant8_4) {
+    execute(quantize::CreateModel_quant8_4,
+            quantize::is_ignored_quant8_4,
+            quantize::examples_quant8_4);
+}
+
diff --git a/runtime/test/generated/vts_models/quantize.model.cpp b/runtime/test/generated/vts_models/quantize.model.cpp
new file mode 100644
index 0000000..665240d
--- /dev/null
+++ b/runtime/test/generated/vts_models/quantize.model.cpp
@@ -0,0 +1,206 @@
+// clang-format off
+// Generated file (from: quantize.mod.py). Do not edit
+// Create the model
+Model createTestModel_quant8() {
+    const std::vector<Operand> operands = {
+        {
+            .type = OperandType::TENSOR_FLOAT32,
+            .dimensions = {300},
+            .numberOfConsumers = 1,
+            .scale = 0.0f,
+            .zeroPoint = 0,
+            .lifetime = OperandLifeTime::MODEL_INPUT,
+            .location = {.poolIndex = 0, .offset = 0, .length = 0},
+        },
+        {
+            .type = OperandType::TENSOR_QUANT8_ASYMM,
+            .dimensions = {300},
+            .numberOfConsumers = 0,
+            .scale = 1.0f,
+            .zeroPoint = 0,
+            .lifetime = OperandLifeTime::MODEL_OUTPUT,
+            .location = {.poolIndex = 0, .offset = 0, .length = 0},
+        }
+    };
+
+    const std::vector<Operation> operations = {
+        {
+            .type = OperationType::QUANTIZE,
+            .inputs = {0},
+            .outputs = {1},
+        }
+    };
+
+    const std::vector<uint32_t> inputIndexes = {0};
+    const std::vector<uint32_t> outputIndexes = {1};
+    std::vector<uint8_t> operandValues = {};
+    const std::vector<hidl_memory> pools = {};
+
+    return {
+        .operands = operands,
+        .operations = operations,
+        .inputIndexes = inputIndexes,
+        .outputIndexes = outputIndexes,
+        .operandValues = operandValues,
+        .pools = pools,
+    };
+}
+
+bool is_ignored_quant8(int i) {
+  static std::set<int> ignore = {};
+  return ignore.find(i) != ignore.end();
+}
+
+// Create the model
+Model createTestModel_quant8_2() {
+    const std::vector<Operand> operands = {
+        {
+            .type = OperandType::TENSOR_FLOAT32,
+            .dimensions = {300},
+            .numberOfConsumers = 1,
+            .scale = 0.0f,
+            .zeroPoint = 0,
+            .lifetime = OperandLifeTime::MODEL_INPUT,
+            .location = {.poolIndex = 0, .offset = 0, .length = 0},
+        },
+        {
+            .type = OperandType::TENSOR_QUANT8_ASYMM,
+            .dimensions = {300},
+            .numberOfConsumers = 0,
+            .scale = 1.0f,
+            .zeroPoint = 1,
+            .lifetime = OperandLifeTime::MODEL_OUTPUT,
+            .location = {.poolIndex = 0, .offset = 0, .length = 0},
+        }
+    };
+
+    const std::vector<Operation> operations = {
+        {
+            .type = OperationType::QUANTIZE,
+            .inputs = {0},
+            .outputs = {1},
+        }
+    };
+
+    const std::vector<uint32_t> inputIndexes = {0};
+    const std::vector<uint32_t> outputIndexes = {1};
+    std::vector<uint8_t> operandValues = {};
+    const std::vector<hidl_memory> pools = {};
+
+    return {
+        .operands = operands,
+        .operations = operations,
+        .inputIndexes = inputIndexes,
+        .outputIndexes = outputIndexes,
+        .operandValues = operandValues,
+        .pools = pools,
+    };
+}
+
+bool is_ignored_quant8_2(int i) {
+  static std::set<int> ignore = {};
+  return ignore.find(i) != ignore.end();
+}
+
+// Create the model
+Model createTestModel_quant8_3() {
+    const std::vector<Operand> operands = {
+        {
+            .type = OperandType::TENSOR_FLOAT32,
+            .dimensions = {300},
+            .numberOfConsumers = 1,
+            .scale = 0.0f,
+            .zeroPoint = 0,
+            .lifetime = OperandLifeTime::MODEL_INPUT,
+            .location = {.poolIndex = 0, .offset = 0, .length = 0},
+        },
+        {
+            .type = OperandType::TENSOR_QUANT8_ASYMM,
+            .dimensions = {300},
+            .numberOfConsumers = 0,
+            .scale = 0.01f,
+            .zeroPoint = 120,
+            .lifetime = OperandLifeTime::MODEL_OUTPUT,
+            .location = {.poolIndex = 0, .offset = 0, .length = 0},
+        }
+    };
+
+    const std::vector<Operation> operations = {
+        {
+            .type = OperationType::QUANTIZE,
+            .inputs = {0},
+            .outputs = {1},
+        }
+    };
+
+    const std::vector<uint32_t> inputIndexes = {0};
+    const std::vector<uint32_t> outputIndexes = {1};
+    std::vector<uint8_t> operandValues = {};
+    const std::vector<hidl_memory> pools = {};
+
+    return {
+        .operands = operands,
+        .operations = operations,
+        .inputIndexes = inputIndexes,
+        .outputIndexes = outputIndexes,
+        .operandValues = operandValues,
+        .pools = pools,
+    };
+}
+
+bool is_ignored_quant8_3(int i) {
+  static std::set<int> ignore = {};
+  return ignore.find(i) != ignore.end();
+}
+
+// Create the model
+Model createTestModel_quant8_4() {
+    const std::vector<Operand> operands = {
+        {
+            .type = OperandType::TENSOR_FLOAT32,
+            .dimensions = {300},
+            .numberOfConsumers = 1,
+            .scale = 0.0f,
+            .zeroPoint = 0,
+            .lifetime = OperandLifeTime::MODEL_INPUT,
+            .location = {.poolIndex = 0, .offset = 0, .length = 0},
+        },
+        {
+            .type = OperandType::TENSOR_QUANT8_ASYMM,
+            .dimensions = {300},
+            .numberOfConsumers = 0,
+            .scale = 10.0f,
+            .zeroPoint = 120,
+            .lifetime = OperandLifeTime::MODEL_OUTPUT,
+            .location = {.poolIndex = 0, .offset = 0, .length = 0},
+        }
+    };
+
+    const std::vector<Operation> operations = {
+        {
+            .type = OperationType::QUANTIZE,
+            .inputs = {0},
+            .outputs = {1},
+        }
+    };
+
+    const std::vector<uint32_t> inputIndexes = {0};
+    const std::vector<uint32_t> outputIndexes = {1};
+    std::vector<uint8_t> operandValues = {};
+    const std::vector<hidl_memory> pools = {};
+
+    return {
+        .operands = operands,
+        .operations = operations,
+        .inputIndexes = inputIndexes,
+        .outputIndexes = outputIndexes,
+        .operandValues = operandValues,
+        .pools = pools,
+    };
+}
+
+bool is_ignored_quant8_4(int i) {
+  static std::set<int> ignore = {};
+  return ignore.find(i) != ignore.end();
+}
+
diff --git a/runtime/test/specs/V1_2/quantize.mod.py b/runtime/test/specs/V1_2/quantize.mod.py
new file mode 100644
index 0000000..a7753ae
--- /dev/null
+++ b/runtime/test/specs/V1_2/quantize.mod.py
@@ -0,0 +1,38 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import numpy as np
+
+num_values = 300
+values = list(np.linspace(-10, 10, num_values))
+
+for scale, offset in [(1.0, 0),
+                      (1.0, 1),
+                      (0.01, 120),
+                      (10.0, 120)]:
+  input0 = Input("input0", "TENSOR_FLOAT32", "{%d}" % num_values)
+  output0 = Output("output0", "TENSOR_FLOAT32", "{%d}" % num_values)
+
+  model = Model().Operation("QUANTIZE", input0).To(output0)
+
+  quantizeOutput = DataTypeConverter().Identify({
+      output0: ["TENSOR_QUANT8_ASYMM", scale, offset],
+  })
+
+  Example({
+      input0: values,
+      output0: values,
+  }).AddVariations(quantizeOutput, includeDefault=False)